Improve things for OpenAL devices management.
parent 0945e04f6b, commit 2f40a44b5c
13 changed files with 140 additions and 92 deletions
@@ -430,20 +430,20 @@ void Call::setMuted(bool mute) {
 void Call::setupMediaDevices() {
 	_playbackDeviceId.changes() | rpl::filter([=] {
 		return _instance && _setDeviceIdCallback;
-	}) | rpl::start_with_next([=](const QString &deviceId) {
-		_setDeviceIdCallback(
-			Webrtc::DeviceType::Playback,
-			deviceId);
-		_instance->setAudioOutputDevice(deviceId.toStdString());
+	}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
+		_setDeviceIdCallback(deviceId);
+
+		// Value doesn't matter here, just trigger reading of the new value.
+		_instance->setAudioOutputDevice(deviceId.value.toStdString());
 	}, _lifetime);

 	_captureDeviceId.changes() | rpl::filter([=] {
 		return _instance && _setDeviceIdCallback;
-	}) | rpl::start_with_next([=](const QString &deviceId) {
-		_setDeviceIdCallback(
-			Webrtc::DeviceType::Capture,
-			deviceId);
-		_instance->setAudioInputDevice(deviceId.toStdString());
+	}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
+		_setDeviceIdCallback(deviceId);
+
+		// Value doesn't matter here, just trigger reading of the new value.
+		_instance->setAudioInputDevice(deviceId.value.toStdString());
 	}, _lifetime);
 }

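Throughout the diff, device ids are passed around as Webrtc::DeviceResolvedId instead of a plain QString. The type itself is defined in the lib_webrtc submodule (bumped at the end of this commit) and is not shown here; judging only from how these hunks use it (a .value string, a .type, an isDefault() check, equality comparison), it presumably looks roughly like this sketch:

// Hypothetical sketch of Webrtc::DeviceResolvedId, inferred from its usage
// in this commit; the real definition lives in the lib_webrtc submodule and
// may differ in detail.
#include <QtCore/QString>

namespace Webrtc {

enum class DeviceType {
	Playback,
	Capture,
};

struct DeviceResolvedId {
	QString value; // resolved system device id
	DeviceType type = DeviceType::Playback;

	[[nodiscard]] bool isDefault() const {
		// Assumption: the default device resolves to an empty value.
		return value.isEmpty();
	}
	friend bool operator==(
		const DeviceResolvedId &,
		const DeviceResolvedId &) = default;
};

} // namespace Webrtc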
@@ -497,10 +497,11 @@ void Call::setupOutgoingVideo() {
 	_cameraDeviceId.changes(
 	) | rpl::filter([=] {
 		return !_videoCaptureIsScreencast;
-	}) | rpl::start_with_next([=](QString deviceId) {
-		_videoCaptureDeviceId = deviceId;
+	}) | rpl::start_with_next([=](Webrtc::DeviceResolvedId deviceId) {
+		const auto &id = deviceId.value;
+		_videoCaptureDeviceId = id;
 		if (_videoCapture) {
-			_videoCapture->switchToDevice(deviceId.toStdString(), false);
+			_videoCapture->switchToDevice(id.toStdString(), false);
 			if (_instance) {
 				_instance->sendVideoDeviceUpdated();
 			}
@@ -904,24 +905,25 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 	const auto playbackDeviceIdInitial = _playbackDeviceId.current();
 	const auto captureDeviceIdInitial = _captureDeviceId.current();
 	const auto saveSetDeviceIdCallback = [=](
-			Fn<void(Webrtc::DeviceType, QString)> setDeviceIdCallback) {
-		setDeviceIdCallback(
-			Webrtc::DeviceType::Playback,
-			playbackDeviceIdInitial);
-		setDeviceIdCallback(
-			Webrtc::DeviceType::Capture,
-			captureDeviceIdInitial);
+			Fn<void(Webrtc::DeviceResolvedId)> setDeviceIdCallback) {
+		setDeviceIdCallback(playbackDeviceIdInitial);
+		setDeviceIdCallback(captureDeviceIdInitial);
 		crl::on_main(weak, [=] {
 			_setDeviceIdCallback = std::move(setDeviceIdCallback);
 			const auto playback = _playbackDeviceId.current();
 			if (_instance && playback != playbackDeviceIdInitial) {
-				_setDeviceIdCallback(Webrtc::DeviceType::Playback, playback);
-				_instance->setAudioOutputDevice(playback.toStdString());
+				_setDeviceIdCallback(playback);
+
+				// Value doesn't matter here, just trigger reading of the new value.
+				_instance->setAudioOutputDevice(
+					playback.value.toStdString());
 			}
 			const auto capture = _captureDeviceId.current();
 			if (_instance && capture != captureDeviceIdInitial) {
-				_setDeviceIdCallback(Webrtc::DeviceType::Capture, capture);
-				_instance->setAudioInputDevice(capture.toStdString());
+				_setDeviceIdCallback(capture);
+
+				// Value doesn't matter here, just trigger reading of the new value.
+				_instance->setAudioInputDevice(capture.value.toStdString());
 			}
 		});
 	};
@@ -944,8 +946,8 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
 			std::move(encryptionKeyValue),
 			(_type == Type::Outgoing)),
 		.mediaDevicesConfig = tgcalls::MediaDevicesConfig{
-			.audioInputId = captureDeviceIdInitial.toStdString(),
-			.audioOutputId = playbackDeviceIdInitial.toStdString(),
+			.audioInputId = captureDeviceIdInitial.value.toStdString(),
+			.audioOutputId = playbackDeviceIdInitial.value.toStdString(),
 			.inputVolume = 1.f,//settings.callInputVolume() / 100.f,
 			.outputVolume = 1.f,//settings.callOutputVolume() / 100.f,
 		},
@@ -1223,7 +1225,7 @@ void Call::toggleCameraSharing(bool enabled) {
 	}
 	_delegate->callRequestPermissionsOrFail(crl::guard(this, [=] {
 		toggleScreenSharing(std::nullopt);
-		_videoCaptureDeviceId = _cameraDeviceId.current();
+		_videoCaptureDeviceId = _cameraDeviceId.current().value;
 		if (_videoCapture) {
 			_videoCapture->switchToDevice(
 				_videoCaptureDeviceId.toStdString(),
@@ -12,7 +12,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "base/bytes.h"
 #include "mtproto/sender.h"
 #include "mtproto/mtproto_auth_key.h"
-#include "webrtc/webrtc_device_id.h"
+#include "webrtc/webrtc_device_resolver.h"

 namespace Media {
 namespace Audio {
@@ -271,10 +271,10 @@ private:
 	base::DelayedCallTimer _finishByTimeoutTimer;
 	base::Timer _discardByTimeoutTimer;

-	Fn<void(Webrtc::DeviceType, QString)> _setDeviceIdCallback;
-	Webrtc::DeviceId _playbackDeviceId;
-	Webrtc::DeviceId _captureDeviceId;
-	Webrtc::DeviceId _cameraDeviceId;
+	Fn<void(Webrtc::DeviceResolvedId)> _setDeviceIdCallback;
+	Webrtc::DeviceResolver _playbackDeviceId;
+	Webrtc::DeviceResolver _captureDeviceId;
+	Webrtc::DeviceResolver _cameraDeviceId;

 	rpl::variable<bool> _muted = false;

@@ -2066,22 +2066,26 @@ void GroupCall::applyOtherParticipantUpdate(
 void GroupCall::setupMediaDevices() {
 	_playbackDeviceId.changes() | rpl::filter([=] {
 		return _instance && _setDeviceIdCallback;
-	}) | rpl::start_with_next([=](const QString &deviceId) {
-		_setDeviceIdCallback(Webrtc::DeviceType::Playback, deviceId);
-		_instance->setAudioOutputDevice(deviceId.toStdString());
+	}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
+		_setDeviceIdCallback(deviceId);
+
+		// Value doesn't matter here, just trigger reading of the new value.
+		_instance->setAudioOutputDevice(deviceId.value.toStdString());
 	}, _lifetime);

 	_captureDeviceId.changes() | rpl::filter([=] {
 		return _instance && _setDeviceIdCallback;
-	}) | rpl::start_with_next([=](const QString &deviceId) {
-		_setDeviceIdCallback(Webrtc::DeviceType::Capture, deviceId);
-		_instance->setAudioInputDevice(deviceId.toStdString());
+	}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
+		_setDeviceIdCallback(deviceId);
+
+		// Value doesn't matter here, just trigger reading of the new value.
+		_instance->setAudioInputDevice(deviceId.value.toStdString());
 	}, _lifetime);

 	_cameraDeviceId.changes() | rpl::filter([=] {
 		return _cameraCapture != nullptr;
-	}) | rpl::start_with_next([=](const QString &deviceId) {
-		_cameraCapture->switchToDevice(deviceId.toStdString(), false);
+	}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
+		_cameraCapture->switchToDevice(deviceId.value.toStdString(), false);
 	}, _lifetime);
 }

@@ -2119,7 +2123,7 @@ bool GroupCall::emitShareCameraError() {
 		return emitError(Error::DisabledNoCamera);
 	} else if (mutedByAdmin()) {
 		return emitError(Error::MutedNoCamera);
-	} else if (_cameraDeviceId.current().isEmpty()) {
+	} else if (_cameraDeviceId.current().value.isEmpty()) {
 		return emitError(Error::NoCamera);
 	}
 	return false;
@@ -2128,7 +2132,7 @@ bool GroupCall::emitShareCameraError() {
 void GroupCall::emitShareCameraError(Error error) {
 	_cameraState = Webrtc::VideoState::Inactive;
 	if (error == Error::CameraFailed
-		&& _cameraDeviceId.current().isEmpty()) {
+		&& _cameraDeviceId.current().value.isEmpty()) {
 		error = Error::NoCamera;
 	}
 	_errors.fire_copy(error);
@@ -2182,7 +2186,7 @@ void GroupCall::setupOutgoingVideo() {
 		return;
 	} else if (!_cameraCapture) {
 		_cameraCapture = _delegate->groupCallGetVideoCapture(
-			_cameraDeviceId.current());
+			_cameraDeviceId.current().value);
 		if (!_cameraCapture) {
 			return emitShareCameraError(Error::CameraFailed);
 		}
@@ -2194,7 +2198,7 @@ void GroupCall::setupOutgoingVideo() {
 		});
 	} else {
 		_cameraCapture->switchToDevice(
-			_cameraDeviceId.current().toStdString(),
+			_cameraDeviceId.current().value.toStdString(),
 			false);
 	}
 	if (_instance) {
@@ -2343,24 +2347,25 @@ bool GroupCall::tryCreateController() {
 	const auto playbackDeviceIdInitial = _playbackDeviceId.current();
 	const auto captureDeviceIdInitial = _captureDeviceId.current();
 	const auto saveSetDeviceIdCallback = [=](
-			Fn<void(Webrtc::DeviceType, QString)> setDeviceIdCallback) {
-		setDeviceIdCallback(
-			Webrtc::DeviceType::Playback,
-			playbackDeviceIdInitial);
-		setDeviceIdCallback(
-			Webrtc::DeviceType::Capture,
-			captureDeviceIdInitial);
+			Fn<void(Webrtc::DeviceResolvedId)> setDeviceIdCallback) {
+		setDeviceIdCallback(playbackDeviceIdInitial);
+		setDeviceIdCallback(captureDeviceIdInitial);
 		crl::on_main(weak, [=] {
 			_setDeviceIdCallback = std::move(setDeviceIdCallback);
 			const auto playback = _playbackDeviceId.current();
 			if (_instance && playback != playbackDeviceIdInitial) {
-				_setDeviceIdCallback(Webrtc::DeviceType::Playback, playback);
-				_instance->setAudioOutputDevice(playback.toStdString());
+				_setDeviceIdCallback(playback);
+
+				// Value doesn't matter here, just trigger reading of the new value.
+				_instance->setAudioOutputDevice(
+					playback.value.toStdString());
 			}
 			const auto capture = _captureDeviceId.current();
 			if (_instance && capture != captureDeviceIdInitial) {
-				_setDeviceIdCallback(Webrtc::DeviceType::Capture, capture);
-				_instance->setAudioInputDevice(capture.toStdString());
+				_setDeviceIdCallback(capture);
+
+				// Value doesn't matter here, just trigger reading of the new value.
+				_instance->setAudioInputDevice(capture.value.toStdString());
 			}
 		});
 	};
@@ -2387,8 +2392,8 @@ bool GroupCall::tryCreateController() {
 			}
 			crl::on_main(weak, [=] { audioLevelsUpdated(data); });
 		},
-		.initialInputDeviceId = captureDeviceIdInitial.toStdString(),
-		.initialOutputDeviceId = playbackDeviceIdInitial.toStdString(),
+		.initialInputDeviceId = captureDeviceIdInitial.value.toStdString(),
+		.initialOutputDeviceId = playbackDeviceIdInitial.value.toStdString(),
 		.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
 			saveSetDeviceIdCallback),
 		.videoCapture = _cameraCapture,
@@ -12,7 +12,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "base/bytes.h"
 #include "mtproto/sender.h"
 #include "mtproto/mtproto_auth_key.h"
-#include "webrtc/webrtc_device_id.h"
+#include "webrtc/webrtc_device_resolver.h"

 class History;

@@ -667,10 +667,10 @@ private:

 	crl::time _lastSendProgressUpdate = 0;

-	Fn<void(Webrtc::DeviceType, QString)> _setDeviceIdCallback;
-	Webrtc::DeviceId _playbackDeviceId;
-	Webrtc::DeviceId _captureDeviceId;
-	Webrtc::DeviceId _cameraDeviceId;
+	Fn<void(Webrtc::DeviceResolvedId)> _setDeviceIdCallback;
+	Webrtc::DeviceResolver _playbackDeviceId;
+	Webrtc::DeviceResolver _captureDeviceId;
+	Webrtc::DeviceResolver _cameraDeviceId;

 	std::shared_ptr<GlobalShortcutManager> _shortcutManager;
 	std::shared_ptr<GlobalShortcutValue> _pushToTalk;
@@ -42,6 +42,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 #include "core/application.h"
 #include "core/core_settings.h"
 #include "webrtc/webrtc_audio_input_tester.h"
+#include "webrtc/webrtc_device_resolver.h"
 #include "settings/settings_calls.h"
 #include "main/main_session.h"
 #include "apiwrap.h"
@@ -249,7 +250,7 @@ void SettingsBox(
 	const auto weakBox = Ui::MakeWeak(box);

 	struct State {
-		std::unique_ptr<Webrtc::DeviceId> computedDeviceId;
+		std::unique_ptr<Webrtc::DeviceResolver> deviceId;
 		std::unique_ptr<Webrtc::AudioInputTester> micTester;
 		Ui::LevelMeter *micTestLevel = nullptr;
 		float micLevel = 0.;
@@ -770,14 +771,14 @@ void SettingsBox(
 	box->setShowFinishedCallback([=] {
 		// Means we finished showing the box.
 		crl::on_main(box, [=] {
-			state->computedDeviceId = std::make_unique<Webrtc::DeviceId>(
+			state->deviceId = std::make_unique<Webrtc::DeviceResolver>(
 				&Core::App().mediaDevices(),
 				Webrtc::DeviceType::Capture,
 				Webrtc::DeviceIdValueWithFallback(
 					Core::App().settings().callCaptureDeviceIdValue(),
 					Core::App().settings().captureDeviceIdValue()));
 			state->micTester = std::make_unique<Webrtc::AudioInputTester>(
-				state->computedDeviceId->value());
+				state->deviceId->value());
 			state->levelUpdateTimer.callEach(kMicTestUpdateInterval);
 		});
 	});
@@ -884,11 +885,13 @@ std::pair<Fn<void()>, rpl::lifetime> ShareInviteLinkAction(
 MicLevelTester::MicLevelTester(Fn<void()> show)
 : _show(std::move(show))
 , _timer([=] { check(); })
-, _tester(
-	std::make_unique<Webrtc::AudioInputTester>(
-		Webrtc::DeviceIdValueWithFallback(
-			Core::App().settings().callCaptureDeviceIdValue(),
-			Core::App().settings().captureDeviceIdValue()))) {
+, _deviceId(std::make_unique<Webrtc::DeviceResolver>(
+	&Core::App().mediaDevices(),
+	Webrtc::DeviceType::Capture,
+	Webrtc::DeviceIdValueWithFallback(
+		Core::App().settings().callCaptureDeviceIdValue(),
+		Core::App().settings().captureDeviceIdValue())))
+, _tester(std::make_unique<Webrtc::AudioInputTester>(_deviceId->value())) {
 	_timer.callEach(kMicrophoneTooltipCheckInterval);
 }

@@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL

 namespace Webrtc {
 class AudioInputTester;
+class DeviceResolver;
 } // namespace Webrtc

 namespace Calls {
@@ -38,6 +39,7 @@ private:

 	Fn<void()> _show;
 	base::Timer _timer;
+	std::unique_ptr<Webrtc::DeviceResolver> _deviceId;
 	std::unique_ptr<Webrtc::AudioInputTester> _tester;
 	int _loudCount = 0;
 	int _quietCount = 0;
@@ -39,6 +39,9 @@ constexpr auto kWaveformCounterBufferSize = 256 * 1024;
 QMutex AudioMutex;
 ALCdevice *AudioDevice = nullptr;
 ALCcontext *AudioContext = nullptr;
+Webrtc::DeviceResolvedId AudioDeviceLastUsedId{
+	.type = Webrtc::DeviceType::Playback
+};

 auto VolumeMultiplierAll = 1.;
 auto VolumeMultiplierSong = 1.;
@@ -89,8 +92,12 @@ void DestroyPlaybackDevice() {
 bool CreatePlaybackDevice() {
 	if (AudioDevice) return true;

-	const auto id = Current().playbackDeviceId().toStdString();
-	AudioDevice = alcOpenDevice(id.c_str());
+	AudioDeviceLastUsedId = Current().playbackDeviceId();
+
+	const auto id = AudioDeviceLastUsedId.isDefault()
+		? std::string()
+		: AudioDeviceLastUsedId.value.toStdString();
+	AudioDevice = alcOpenDevice(id.empty() ? nullptr : id.c_str());
 	if (!AudioDevice) {
 		LOG(("Audio Error: Could not create default playback device, refreshing.."));
 		crl::on_main([] {
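The reworked CreatePlaybackDevice() leans on a standard OpenAL convention: alcOpenDevice() opens a specific device when given a device specifier string and the implementation's default playback device when given nullptr. A minimal standalone sketch of that mapping (the helper name is illustrative, not part of the codebase):

// Illustrative helper only: maps an empty specifier to the OpenAL default
// playback device, mirroring `alcOpenDevice(id.empty() ? nullptr : id.c_str())`
// in the hunk above.
#include <AL/alc.h>
#include <string>

ALCdevice *OpenPlaybackDevice(const std::string &specifier) {
	return alcOpenDevice(specifier.empty() ? nullptr : specifier.c_str());
}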
@@ -1380,6 +1387,20 @@ void DetachFromDevice(not_null<Audio::Instance*> instance) {
 	}
 }

+bool DetachIfDeviceChanged(
+		not_null<Audio::Instance*> instance,
+		const Webrtc::DeviceResolvedId &nowDeviceId) {
+	QMutexLocker lock(&AudioMutex);
+	if (AudioDeviceLastUsedId == nowDeviceId) {
+		return false;
+	}
+	Audio::ClosePlaybackDevice(instance);
+	if (mixer()) {
+		mixer()->reattachIfNeeded();
+	}
+	return true;
+}
+
 } // namespace internal

 } // namespace Player
@@ -30,6 +30,10 @@ struct TimePoint;
 } // namespace Streaming
 } // namespace Media

+namespace Webrtc {
+struct DeviceResolvedId;
+} // namespace Webrtc
+
 namespace Media {
 namespace Audio {

@@ -378,6 +382,9 @@ bool CheckAudioDeviceConnected();

 // Thread: Main. Locks: AudioMutex.
 void DetachFromDevice(not_null<Audio::Instance*> instance);
+bool DetachIfDeviceChanged(
+	not_null<Audio::Instance*> instance,
+	const Webrtc::DeviceResolvedId &nowDeviceId);

 // Thread: Any.
 QMutex *audioPlayerMutex();
@@ -85,7 +85,10 @@ public:
 	Inner(QThread *thread);
 	~Inner();

-	void start(QString id, Fn<void(Update)> updated, Fn<void()> error);
+	void start(
+		Webrtc::DeviceResolvedId id,
+		Fn<void(Update)> updated,
+		Fn<void()> error);
 	void stop(Fn<void(Result&&)> callback = nullptr);
 	void pause(bool value, Fn<void(Result&&)> callback);

@@ -295,7 +298,7 @@ void Instance::Inner::fail() {
 }

 void Instance::Inner::start(
-		QString id,
+		Webrtc::DeviceResolvedId id,
 		Fn<void(Update)> updated,
 		Fn<void()> error) {
 	_updated = std::move(updated);
@@ -305,9 +308,9 @@ void Instance::Inner::start(
 	}

 	// Start OpenAL Capture
-	const auto utf = id.toStdString();
+	const auto utf = id.isDefault() ? std::string() : id.value.toStdString();
 	d->device = alcCaptureOpenDevice(
-		utf.c_str(),
+		utf.empty() ? nullptr : utf.c_str(),
 		kCaptureFrequency,
 		AL_FORMAT_MONO16,
 		kCaptureFrequency / 5);
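The capture path relies on the matching convention: alcCaptureOpenDevice() accepts nullptr as the device name to open the default capture device. A small sketch of the call shape used above (the helper and its parameters are illustrative; the 1/5-second buffer size follows the surrounding code):

// Illustrative helper only; not part of the codebase.
#include <AL/al.h>
#include <AL/alc.h>
#include <string>

ALCdevice *OpenCaptureDevice(const std::string &specifier, ALCuint frequency) {
	return alcCaptureOpenDevice(
		specifier.empty() ? nullptr : specifier.c_str(), // nullptr -> default device
		frequency,
		AL_FORMAT_MONO16,
		frequency / 5); // ~200 ms capture buffer, as in the diff
}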
@@ -272,17 +272,19 @@ Instance::Instance()
 		Player::internal::DetachFromDevice(this);
 	});

-	_playbackDeviceId.changes() | rpl::start_with_next([=] {
-		_detachFromDeviceForce = false;
-		Player::internal::DetachFromDevice(this);
+	_playbackDeviceId.changes(
+	) | rpl::start_with_next([=](Webrtc::DeviceResolvedId id) {
+		if (Player::internal::DetachIfDeviceChanged(this, id)) {
+			_detachFromDeviceForce = false;
+		}
 	}, _lifetime);
 }

-QString Instance::playbackDeviceId() const {
-	return _playbackDeviceId.current();
+Webrtc::DeviceResolvedId Instance::playbackDeviceId() const {
+	return _playbackDeviceId.threadSafeCurrent();
 }

-QString Instance::captureDeviceId() const {
+Webrtc::DeviceResolvedId Instance::captureDeviceId() const {
 	return _captureDeviceId.current();
 }

@@ -9,7 +9,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL

 #include "base/timer.h"
 #include "base/bytes.h"
-#include "webrtc/webrtc_device_id.h"
+#include "webrtc/webrtc_device_resolver.h"

 namespace Core {
 class FileLocation;
@@ -95,8 +95,11 @@ public:
 	// Thread: Main.
 	Instance();

-	[[nodiscard]] QString playbackDeviceId() const;
-	[[nodiscard]] QString captureDeviceId() const;
+	// Thread: Any. Must be locked: AudioMutex.
+	[[nodiscard]] Webrtc::DeviceResolvedId playbackDeviceId() const;
+
+	// Thread: Main.
+	[[nodiscard]] Webrtc::DeviceResolvedId captureDeviceId() const;

 	[[nodiscard]] std::unique_ptr<Track> createTrack();

@@ -119,8 +122,8 @@ private:

 private:
 	std::set<Track*> _tracks;
-	Webrtc::DeviceId _playbackDeviceId;
-	Webrtc::DeviceId _captureDeviceId;
+	Webrtc::DeviceResolver _playbackDeviceId;
+	Webrtc::DeviceResolver _captureDeviceId;

 	base::Timer _updateTimer;

@@ -365,7 +365,7 @@ void Calls::initCaptureButton(
 	});

 	struct LevelState {
-		std::unique_ptr<Webrtc::DeviceId> computedDeviceId;
+		std::unique_ptr<Webrtc::DeviceResolver> deviceId;
 		std::unique_ptr<Webrtc::AudioInputTester> tester;
 		base::Timer timer;
 		Ui::Animations::Simple animation;
@@ -388,18 +388,18 @@ void Calls::initCaptureButton(
 	});
 	_testingMicrophone.value() | rpl::start_with_next([=](bool testing) {
 		if (testing) {
-			state->computedDeviceId = std::make_unique<Webrtc::DeviceId>(
+			state->deviceId = std::make_unique<Webrtc::DeviceResolver>(
 				&Core::App().mediaDevices(),
 				Webrtc::DeviceType::Capture,
 				rpl::duplicate(resolvedId));
 			state->tester = std::make_unique<AudioInputTester>(
-				state->computedDeviceId->value());
+				state->deviceId->value());
 			state->timer.callEach(kMicTestUpdateInterval);
 		} else {
 			state->timer.cancel();
 			state->animation.stop();
 			state->tester = nullptr;
-			state->computedDeviceId = nullptr;
+			state->deviceId = nullptr;
 		}
 	}, level->lifetime());
 }
@@ -1 +1 @@
-Subproject commit 16b8f6ee0a1b4a1852266f1b3fc727f6a82c3716
+Subproject commit 72b1aa0405e14beef0b596c9bc748eb8905a7ef8