mirror of https://github.com/AyuGram/AyuGramDesktop.git
synced 2025-04-16 06:07:06 +02:00

Fix build on Linux 64 bit.

parent bd16708781
commit eda22b925f

9 changed files with 71 additions and 55 deletions
@@ -87,7 +87,7 @@ void AppendServer(
 	if (host.isEmpty()) {
 		return;
 	}
-	list.push_back({
+	list.push_back(tgcalls::RtcServer{
 		.host = host.toStdString(),
 		.port = port,
 		.isTurn = false
@@ -100,7 +100,7 @@ void AppendServer(
 	const auto password = qs(data.vpassword());
 	if (data.is_turn() && !username.isEmpty() && !password.isEmpty()) {
 		const auto pushTurn = [&](const QString &host) {
-			list.push_back({
+			list.push_back(tgcalls::RtcServer{
 				.host = host.toStdString(),
 				.port = port,
 				.login = username.toStdString(),
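The two hunks above name the aggregate type explicitly instead of relying on a bare braced list, presumably because the targeted compiler configuration handles a bare designated-initializer list less reliably here. A minimal, self-contained sketch of the two forms; the struct below is illustrative, not the real tgcalls::RtcServer:

    // Sketch only: a stand-in for tgcalls::RtcServer with a compatible field set.
    #include <string>
    #include <vector>

    struct RtcServer {
        std::string host;
        int port = 0;
        std::string login;
        std::string password;
        bool isTurn = false;
    };

    int main() {
        std::vector<RtcServer> list;
        // Old form: the element type is only implied by the container.
        list.push_back({ .host = "turn.example.org", .port = 3478, .isTurn = false });
        // New form used in the diff: the aggregate type is spelled out at the call
        // site, so the designated-initializer list is constructed unambiguously.
        list.push_back(RtcServer{ .host = "turn.example.org", .port = 3478, .isTurn = false });
    }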
@@ -143,8 +143,8 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
 	return WrapVersions(tgcalls::Meta::Versions() | ranges::action::reverse);
 }
 
-[[nodiscard]] webrtc::VideoState StartVideoState(bool enabled) {
-	using State = webrtc::VideoState;
+[[nodiscard]] Webrtc::VideoState StartVideoState(bool enabled) {
+	using State = Webrtc::VideoState;
 	return enabled ? State::Active : State::Inactive;
 }
 
@@ -159,8 +159,8 @@ Call::Call(
 , _user(user)
 , _api(&_user->session().mtp())
 , _type(type)
-, _videoIncoming(std::make_unique<webrtc::VideoTrack>(StartVideoState(video)))
-, _videoOutgoing(std::make_unique<webrtc::VideoTrack>(StartVideoState(video))) {
+, _videoIncoming(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video)))
+, _videoOutgoing(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video))) {
 	_discardByTimeoutTimer.setCallback([=] { hangup(); });
 
 	if (_type == Type::Outgoing) {
@@ -345,14 +345,14 @@ void Call::setMuted(bool mute) {
 void Call::setupOutgoingVideo() {
 	const auto started = _videoOutgoing->state();
 	_videoOutgoing->stateValue(
-	) | rpl::start_with_next([=](webrtc::VideoState state) {
+	) | rpl::start_with_next([=](Webrtc::VideoState state) {
 		if (_state.current() != State::Established
 			&& state != started
 			&& !_videoCapture) {
 			_videoOutgoing->setState(started);
-		} else if (state != webrtc::VideoState::Inactive) {
+		} else if (state != Webrtc::VideoState::Inactive) {
 			// Paused not supported right now.
-			Assert(state == webrtc::VideoState::Active);
+			Assert(state == Webrtc::VideoState::Active);
 			if (!_videoCapture) {
 				_videoCapture = tgcalls::VideoCaptureInterface::Create();
 				_videoCapture->setOutput(_videoOutgoing->sink());
@@ -367,11 +367,11 @@ void Call::setupOutgoingVideo() {
 	}, _lifetime);
 }
 
-not_null<webrtc::VideoTrack*> Call::videoIncoming() const {
+not_null<Webrtc::VideoTrack*> Call::videoIncoming() const {
 	return _videoIncoming.get();
 }
 
-not_null<webrtc::VideoTrack*> Call::videoOutgoing() const {
+not_null<Webrtc::VideoTrack*> Call::videoOutgoing() const {
 	return _videoOutgoing.get();
 }
 
@@ -589,7 +589,7 @@ void Call::updateRemoteMediaState(
 	}();
 	_videoIncoming->setState([&] {
 		using From = tgcalls::VideoState;
-		using To = webrtc::VideoState;
+		using To = Webrtc::VideoState;
 		switch (video) {
 		case From::Inactive: return To::Inactive;
 		case From::Paused: return To::Paused;
@@ -992,8 +992,8 @@ void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
 	auto duration = getDurationMs() / 1000;
 	auto connectionId = _instance ? _instance->getPreferredRelayId() : 0;
 	_finishByTimeoutTimer.call(kHangupTimeoutMs, [this, finalState] { setState(finalState); });
-	const auto flags = ((_videoIncoming->state() != webrtc::VideoState::Inactive)
-		|| (_videoOutgoing->state() != webrtc::VideoState::Inactive))
+	const auto flags = ((_videoIncoming->state() != Webrtc::VideoState::Inactive)
+		|| (_videoOutgoing->state() != Webrtc::VideoState::Inactive))
 		? MTPphone_DiscardCall::Flag::f_video
 		: MTPphone_DiscardCall::Flag(0);
 	_api.request(MTPphone_DiscardCall(
@@ -27,10 +27,10 @@ enum class VideoState;
 enum class AudioState;
 } // namespace tgcalls
 
-namespace webrtc {
+namespace Webrtc {
 enum class VideoState;
 class VideoTrack;
-} // namespace webrtc
+} // namespace Webrtc
 
 namespace Calls {
 
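The rename running through this commit is mechanical: tdesktop's own wrapper types move from the lowercase webrtc namespace to Webrtc, presumably so they no longer collide with the upstream WebRTC library's webrtc namespace that tgcalls brings into the same Linux link (consistent with the ODR comment in the CMake hunks below). A short sketch of how the two namespaces coexist after the change; the upstream class name is illustrative, not part of this diff:

    namespace webrtc {          // upstream WebRTC library namespace (owned by the library)
    class VideoFrame;           // illustrative upstream type
    } // namespace webrtc

    namespace Webrtc {          // tdesktop's wrapper layer after this commit
    enum class VideoState;
    class VideoTrack;
    } // namespace Webrtc

    // Call code now always qualifies the wrapper explicitly, e.g.:
    // rpl::variable<Webrtc::VideoState> _remoteVideoState;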
@@ -117,11 +117,11 @@ public:
 		return _remoteAudioState.value();
 	}
 
-	[[nodiscard]] webrtc::VideoState remoteVideoState() const {
+	[[nodiscard]] Webrtc::VideoState remoteVideoState() const {
 		return _remoteVideoState.current();
 	}
 	[[nodiscard]] auto remoteVideoStateValue() const
-	-> rpl::producer<webrtc::VideoState> {
+	-> rpl::producer<Webrtc::VideoState> {
 		return _remoteVideoState.value();
 	}
 
@@ -140,8 +140,8 @@ public:
 		return _muted.value();
 	}
 
-	[[nodiscard]] not_null<webrtc::VideoTrack*> videoIncoming() const;
-	[[nodiscard]] not_null<webrtc::VideoTrack*> videoOutgoing() const;
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> videoIncoming() const;
+	[[nodiscard]] not_null<Webrtc::VideoTrack*> videoOutgoing() const;
 
 	crl::time getDurationMs() const;
 	float64 getWaitingSoundPeakValue() const;
@@ -212,7 +212,7 @@ private:
 	Type _type = Type::Outgoing;
 	rpl::variable<State> _state = State::Starting;
 	rpl::variable<RemoteAudioState> _remoteAudioState = RemoteAudioState::Active;
-	rpl::variable<webrtc::VideoState> _remoteVideoState;
+	rpl::variable<Webrtc::VideoState> _remoteVideoState;
 	FinishType _finishAfterRequestingCall = FinishType::None;
 	bool _answerAfterDhConfigReceived = false;
 	rpl::variable<int> _signalBarCount = kSignalBarStarting;
@@ -236,8 +236,8 @@ private:
 
 	std::unique_ptr<tgcalls::Instance> _instance;
 	std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
-	const std::unique_ptr<webrtc::VideoTrack> _videoIncoming;
-	const std::unique_ptr<webrtc::VideoTrack> _videoOutgoing;
+	const std::unique_ptr<Webrtc::VideoTrack> _videoIncoming;
+	const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
 
 	std::unique_ptr<Media::Audio::Track> _waitingTrack;
 
@@ -312,9 +312,9 @@ void Panel::initControls() {
 	_camera->setClickedCallback([=] {
 		if (_call) {
 			_call->videoOutgoing()->setState(
-				(_call->videoOutgoing()->state() == webrtc::VideoState::Active)
-				? webrtc::VideoState::Inactive
-				: webrtc::VideoState::Active);
+				(_call->videoOutgoing()->state() == Webrtc::VideoState::Active)
+				? Webrtc::VideoState::Inactive
+				: Webrtc::VideoState::Active);
 		}
 	});
 
@@ -399,8 +399,8 @@ void Panel::reinitWithCall(Call *call) {
 	}, _callLifetime);
 
 	_call->videoOutgoing()->stateValue(
-	) | rpl::start_with_next([=](webrtc::VideoState state) {
-		_camera->setIconOverride((state == webrtc::VideoState::Active)
+	) | rpl::start_with_next([=](Webrtc::VideoState state) {
+		_camera->setIconOverride((state == Webrtc::VideoState::Active)
 			? nullptr
 			: &st::callNoCameraIcon);
 	}, _callLifetime);
@@ -547,7 +547,7 @@ void Panel::initGeometry() {
 
 void Panel::refreshOutgoingPreviewInBody(State state) {
 	const auto inBody = (state != State::Established)
-		&& (_call->videoOutgoing()->state() != webrtc::VideoState::Inactive)
+		&& (_call->videoOutgoing()->state() != Webrtc::VideoState::Inactive)
 		&& !_call->videoOutgoing()->frameSize().isEmpty();
 	if (_outgoingPreviewInBody == inBody) {
 		return;
@@ -722,7 +722,7 @@ void Panel::paintEvent(QPaintEvent *e) {
 	}
 
 	const auto incomingFrame = _call
-		? _call->videoIncoming()->frame(webrtc::FrameRequest())
+		? _call->videoIncoming()->frame(Webrtc::FrameRequest())
 		: QImage();
 	if (!incomingFrame.isNull()) {
 		const auto to = rect().marginsRemoved(_padding);
@@ -877,7 +877,7 @@ void Panel::stateChanged(State state) {
 }
 
 bool Panel::hasActiveVideo() const {
-	const auto inactive = webrtc::VideoState::Inactive;
+	const auto inactive = Webrtc::VideoState::Inactive;
 	return (_call->videoIncoming()->state() != inactive)
 		|| (_call->videoOutgoing()->state() != inactive);
 }
@@ -18,10 +18,10 @@ namespace Calls {
 
 VideoBubble::VideoBubble(
 	not_null<QWidget*> parent,
-	not_null<webrtc::VideoTrack*> track)
+	not_null<Webrtc::VideoTrack*> track)
 : _content(parent)
 , _track(track)
-, _state(webrtc::VideoState::Inactive) {
+, _state(Webrtc::VideoState::Inactive) {
 	setup();
 }
 
@@ -35,7 +35,7 @@ void VideoBubble::setup() {
 	}, lifetime());
 
 	_track->stateValue(
-	) | rpl::start_with_next([=](webrtc::VideoState state) {
+	) | rpl::start_with_next([=](Webrtc::VideoState state) {
 		setState(state);
 	}, lifetime());
 
@@ -137,7 +137,7 @@ void VideoBubble::prepareFrame() {
 		* cIntRetinaFactor();
 
 	// Should we check 'original' and 'size' aspect ratios?..
-	const auto request = webrtc::FrameRequest{
+	const auto request = Webrtc::FrameRequest{
 		.resize = size,
 		.outer = size,
 	};
@@ -165,13 +165,13 @@ void VideoBubble::prepareFrame() {
 		QRect(QPoint(), size));
 }
 
-void VideoBubble::setState(webrtc::VideoState state) {
-	if (state == webrtc::VideoState::Paused) {
+void VideoBubble::setState(Webrtc::VideoState state) {
+	if (state == Webrtc::VideoState::Paused) {
 		using namespace Images;
 		static constexpr auto kRadius = 24;
 		_pausedFrame = Images::BlurLargeImage(_track->frame({}), kRadius);
 		if (_pausedFrame.isNull()) {
-			state = webrtc::VideoState::Inactive;
+			state = Webrtc::VideoState::Inactive;
 		}
 	}
 	_state = state;
@@ -240,7 +240,7 @@ void VideoBubble::setInnerSize(QSize size) {
 
 void VideoBubble::updateVisibility() {
 	const auto size = _track->frameSize();
-	const auto visible = (_state != webrtc::VideoState::Inactive)
+	const auto visible = (_state != Webrtc::VideoState::Inactive)
 		&& !size.isEmpty();
 	if (visible) {
 		updateSizeToFrame(size);
@@ -9,10 +9,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 
 #include "ui/rp_widget.h"
 
-namespace webrtc {
+namespace Webrtc {
 class VideoTrack;
 enum class VideoState;
-} // namespace webrtc
+} // namespace Webrtc
 
 namespace Calls {
 
@@ -20,7 +20,7 @@ class VideoBubble final {
 public:
 	VideoBubble(
 		not_null<QWidget*> parent,
-		not_null<webrtc::VideoTrack*> track);
+		not_null<Webrtc::VideoTrack*> track);
 
 	enum class DragMode {
 		None,
@@ -39,7 +39,7 @@ public:
 private:
 	void setup();
 	void paint();
-	void setState(webrtc::VideoState state);
+	void setState(Webrtc::VideoState state);
 	void applyDragMode(DragMode mode);
 	void applyBoundingRect(QRect rect);
 	void applySizeConstraints(QSize min, QSize max);
@@ -49,8 +49,8 @@ private:
 	void prepareFrame();
 
 	Ui::RpWidget _content;
-	const not_null<webrtc::VideoTrack*> _track;
-	webrtc::VideoState _state = webrtc::VideoState();
+	const not_null<Webrtc::VideoTrack*> _track;
+	Webrtc::VideoState _state = Webrtc::VideoState();
 	QImage _frame, _pausedFrame;
 	QSize _min, _max, _size, _lastDraggableSize, _lastFrameSize;
 	QRect _boundingRect;
Telegram/ThirdParty/tgcalls (vendored)
@@ -1 +1 @@
-Subproject commit 9de3547711c5bb7056c101fc47e1b912d411c2f4
+Subproject commit 82172d1b7bef85c4dba984dc0d9e681b23dbc1a7
@@ -5,7 +5,13 @@
 # https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 
 add_library(lib_tgcalls STATIC)
-init_target(lib_tgcalls cxx_std_14)
+
+if (LINUX)
+    init_target(lib_tgcalls) # All C++20 on Linux, because otherwise ODR violation.
+else()
+    init_target(lib_tgcalls cxx_std_14)
+endif()
+
 add_library(tdesktop::lib_tgcalls ALIAS lib_tgcalls)
 
 set(tgcalls_dir ${third_party_loc}/tgcalls)
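The comment in the hunk above names the motivation: on Linux the tgcalls targets are now built with the same language standard as the rest of the project (init_target without the explicit cxx_std_14 argument presumably inherits the project-wide standard, C++20 per the comment), because mixing standards across statically linked targets that share headers can produce mismatched definitions of the same entity. A hypothetical illustration of that hazard, not tgcalls code:

    // Hypothetical: a shared header whose type layout depends on the language standard.
    // If one static library compiles this as C++14 and another as C++20, the linked
    // binary contains two incompatible definitions of Packet, an ODR violation.
    struct Packet {
    #if __cplusplus >= 202002L
        long long receiveTime = 0;   // member that exists only in C++20 translation units
    #endif
        int size = 0;
    };

    inline int headerSize() {
        return static_cast<int>(sizeof(Packet));  // differs between the two builds
    }

Forcing every tgcalls-related target to the same standard on Linux removes that mismatch, which is what the if (LINUX) branches in this hunk and in the lib_tgcalls_legacy and lib_tgvoip hunks below do.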
@@ -43,6 +49,8 @@ PRIVATE
    platform/PlatformInterface.h

    # Teleram Desktop
    platform/tdesktop/DesktopInterface.cpp
    platform/tdesktop/DesktopInterface.h
    platform/tdesktop/VideoCapturerInterfaceImpl.cpp
    platform/tdesktop/VideoCapturerInterfaceImpl.h
    platform/tdesktop/VideoCapturerTrackSource.cpp
@@ -50,10 +58,6 @@ PRIVATE
    platform/tdesktop/VideoCameraCapturer.cpp
    platform/tdesktop/VideoCameraCapturer.h

    # Windows
    platform/windows/WindowsInterface.cpp
    platform/windows/WindowsInterface.h

    # iOS / macOS
    platform/darwin/DarwinInterface.h
    platform/darwin/DarwinInterface.mm
@@ -84,8 +88,6 @@ PRIVATE
    platform/darwin/VideoMetalViewMac.h
    platform/darwin/VideoMetalViewMac.mm

    # Linux

    # POSIX

    reference/InstanceImplReference.cpp
@@ -121,6 +123,8 @@ elseif (APPLE)
    platform/darwin/VideoMetalView.mm
    platform/darwin/VideoMetalViewMac.h
    platform/darwin/VideoMetalViewMac.mm
    platform/tdesktop/DesktopInterface.cpp
    platform/tdesktop/DesktopInterface.h
    platform/tdesktop/VideoCapturerTrackSource.cpp
    platform/tdesktop/VideoCapturerTrackSource.h
    platform/tdesktop/VideoCapturerInterfaceImpl.cpp
@@ -146,7 +150,13 @@ PRIVATE
 )
 
 add_library(lib_tgcalls_legacy STATIC)
-init_target(lib_tgcalls_legacy cxx_std_14)
+
+if (LINUX)
+    init_target(lib_tgcalls_legacy) # All C++20 on Linux, because otherwise ODR violation.
+else()
+    init_target(lib_tgcalls_legacy cxx_std_14)
+endif()
+
 add_library(tdesktop::lib_tgcalls_legacy ALIAS lib_tgcalls_legacy)
 
 nice_target_sources(lib_tgcalls_legacy ${tgcalls_loc}
@@ -14,7 +14,13 @@ if (TDESKTOP_USE_PACKAGED_TGVOIP AND NOT DESKTOP_APP_USE_PACKAGED_LAZY)
     target_link_libraries(lib_tgvoip INTERFACE PkgConfig::TGVOIP)
 else()
     add_library(lib_tgvoip STATIC)
-    init_target(lib_tgvoip cxx_std_14)
+
+    if (LINUX)
+        init_target(lib_tgvoip) # All C++20 on Linux, because otherwise ODR violation.
+    else()
+        init_target(lib_tgvoip cxx_std_14)
+    endif()
+
     add_library(tdesktop::lib_tgvoip ALIAS lib_tgvoip)
 
     set(tgvoip_loc ${third_party_loc}/libtgvoip)
@@ -1 +1 @@
-Subproject commit a8e19691c5d653310f7ac5f75d69e45b34771fa4
+Subproject commit 183b7ae329d57663131c3019a384516f5f51898f