Improve ffmpeg audio loader interface.

John Preston 2023-03-06 13:51:37 +04:00
parent af95bd6fb7
commit 0880a83c2c
9 changed files with 123 additions and 171 deletions
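
The core of the change: AudioPlayerLoader::readMore() no longer fills a QByteArray and an int64 sample counter passed by reference; it now returns a ReadResult defined as std::variant<bytes::const_span, ReadError>, and sample counts are recovered from byte counts via the new sampleSize() accessor. The following is a rough caller-side sketch of the new contract, mirroring the patterns visible in the diffs below; the loop shape and the `loader` variable are illustrative, not code from the commit.

```cpp
// Sketch only: consuming the variant-based ReadResult introduced here.
// Assumes the ReadError enum, bytes::const_span and the v::is / v::get
// helpers shown in the diffs below; `loader` stands for any concrete
// AudioPlayerLoader implementation.
using Error = AudioPlayerLoader::ReadError;
auto accumulated = QByteArray();
for (auto reading = true; reading;) {
	const auto result = loader.readMore();
	if (v::is<bytes::const_span>(result)) {
		const auto bytes = v::get<bytes::const_span>(result);
		// An empty span means "a packet was consumed, no samples produced yet".
		accumulated.append(
			reinterpret_cast<const char*>(bytes.data()),
			bytes.size());
	} else if (result == Error::NotYet) {
		continue; // bad packet was skipped, try the next one
	} else {
		// Error::Wait (nothing buffered yet), Error::EndOfFile or Error::Other.
		reading = false;
	}
}
const auto accumulatedSamples = accumulated.size() / loader.sampleSize();
```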

View file

@@ -1677,6 +1677,10 @@ public:
 		//}
 	}
+	int sampleSize() override {
+		Unexpected("We shouldn't try to read sample size here.");
+	}
+
 	int format() override {
 		return 0;
 	}
@@ -1701,9 +1705,9 @@ public:
 		return _coverFormat;
 	}
-	ReadResult readMore(QByteArray &result, int64 &samplesAdded) override {
+	ReadResult readMore() override {
 		DEBUG_LOG(("Audio Read Error: should not call this"));
-		return ReadResult::Error;
+		return ReadError::Other;
 	}
 	~FFMpegAttributesReader() {
@@ -1770,24 +1774,23 @@ public:
 		}
 	};
 	while (processed < countbytes) {
-		buffer.resize(0);
-
-		int64 samples = 0;
-		auto res = readMore(buffer, samples);
-		if (res == ReadResult::Error || res == ReadResult::EndOfFile) {
+		const auto result = readMore();
+		const auto sampleBytes = v::is<bytes::const_span>(result)
+			? v::get<bytes::const_span>(result)
+			: bytes::const_span();
+		if (result == ReadError::Other
+			|| result == ReadError::EndOfFile) {
 			break;
-		}
-		if (buffer.isEmpty()) {
+		} else if (sampleBytes.empty()) {
 			continue;
 		}
-		auto sampleBytes = bytes::make_span(buffer);
 		if (fmt == AL_FORMAT_MONO8 || fmt == AL_FORMAT_STEREO8) {
 			Media::Audio::IterateSamples<uchar>(sampleBytes, callback);
 		} else if (fmt == AL_FORMAT_MONO16 || fmt == AL_FORMAT_STEREO16) {
 			Media::Audio::IterateSamples<int16>(sampleBytes, callback);
 		}
-		processed += sampleSize() * samples;
+		processed += sampleBytes.size();
 	}
 	if (sumbytes > 0 && peaks.size() < Media::Player::kWaveformSamplesCount) {
 		peaks.push_back(peak);

View file

@@ -288,26 +288,22 @@ bool AbstractAudioFFMpegLoader::initUsingContext(
 }
 
 auto AbstractAudioFFMpegLoader::replaceFrameAndRead(
-	FFmpeg::FramePointer frame,
-	QByteArray &result,
-	int64 &samplesAdded)
+	FFmpeg::FramePointer frame)
 -> ReadResult {
 	_frame = std::move(frame);
-	return readFromReadyFrame(result, samplesAdded);
+	return readFromReadyFrame();
 }
 
 auto AbstractAudioFFMpegLoader::readFromReadyContext(
-	not_null<AVCodecContext *> context,
-	QByteArray &result,
-	int64 &samplesAdded)
+	not_null<AVCodecContext*> context)
 -> ReadResult {
 	const auto res = avcodec_receive_frame(context, _frame.get());
 	if (res >= 0) {
-		return readFromReadyFrame(result, samplesAdded);
+		return readFromReadyFrame();
 	}
 	if (res == AVERROR_EOF) {
-		return ReadResult::EndOfFile;
+		return ReadError::EndOfFile;
 	} else if (res != AVERROR(EAGAIN)) {
 		char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
 		LOG(("Audio Error: "
@@ -318,9 +314,9 @@ auto AbstractAudioFFMpegLoader::readFromReadyContext(
 		).arg(res
 		).arg(av_make_error_string(err, sizeof(err), res)
 		));
-		return ReadResult::Error;
+		return ReadError::Other;
 	}
-	return ReadResult::Wait;
+	return ReadError::Wait;
 }
 
 bool AbstractAudioFFMpegLoader::frameHasDesiredFormat() const {
@@ -494,29 +490,13 @@ bool AbstractAudioFFMpegLoader::ensureResampleSpaceAvailable(int samples) {
 	return true;
 }
 
-void AbstractAudioFFMpegLoader::appendSamples(
-	QByteArray &result,
-	int64 &samplesAdded,
-	uint8_t **data,
-	int count) const {
-	result.append(
-		reinterpret_cast<const char*>(data[0]),
-		count * _outputSampleSize);
-	samplesAdded += count;
-}
-
-AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
-	QByteArray &result,
-	int64 &samplesAdded) {
+auto AbstractAudioFFMpegLoader::readFromReadyFrame() -> ReadResult {
 	if (frameHasDesiredFormat()) {
-		appendSamples(
-			result,
-			samplesAdded,
-			_frame->extended_data,
-			_frame->nb_samples);
-		return ReadResult::Ok;
+		return bytes::const_span(
+			reinterpret_cast<const bytes::type*>(_frame->extended_data[0]),
+			_frame->nb_samples * _outputSampleSize);
 	} else if (!initResampleForFrame()) {
-		return ReadResult::Error;
+		return ReadError::Other;
 	}
 
 	const auto maxSamples = av_rescale_rnd(
@@ -525,7 +505,7 @@ AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
 		_swrSrcRate,
 		AV_ROUND_UP);
 	if (!ensureResampleSpaceAvailable(maxSamples)) {
-		return ReadResult::Error;
+		return ReadError::Other;
 	}
 	const auto samples = swr_convert(
 		_swrContext,
@@ -543,15 +523,11 @@ AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
 		).arg(samples
 		).arg(av_make_error_string(err, sizeof(err), samples)
 		));
-		return ReadResult::Error;
+		return ReadError::Other;
 	}
-
-	appendSamples(
-		result,
-		samplesAdded,
-		_swrDstData,
-		samples);
-	return ReadResult::Ok;
+	return bytes::const_span(
+		reinterpret_cast<const bytes::type*>(_swrDstData[0]),
+		samples * _outputSampleSize);
 }
 
 AbstractAudioFFMpegLoader::~AbstractAudioFFMpegLoader() {
@@ -648,14 +624,9 @@ bool FFMpegLoader::seekTo(crl::time positionMs) {
 	return true;
 }
 
-AudioPlayerLoader::ReadResult FFMpegLoader::readMore(
-	QByteArray &result,
-	int64 &samplesAdded) {
-	const auto readResult = readFromReadyContext(
-		_codecContext,
-		result,
-		samplesAdded);
-	if (readResult != ReadResult::Wait) {
+FFMpegLoader::ReadResult FFMpegLoader::readMore() {
+	const auto readResult = readFromReadyContext(_codecContext);
+	if (readResult != ReadError::Wait) {
 		return readResult;
 	}
@@ -671,10 +642,10 @@ AudioPlayerLoader::ReadResult FFMpegLoader::readMore(
 			).arg(res
 			).arg(av_make_error_string(err, sizeof(err), res)
 			));
-			return ReadResult::Error;
+			return ReadError::Other;
 		}
 		avcodec_send_packet(_codecContext, nullptr); // drain
-		return ReadResult::Ok;
+		return bytes::const_span();
 	}
 
 	if (_packet.stream_index == streamId) {
@@ -696,11 +667,11 @@ AudioPlayerLoader::ReadResult FFMpegLoader::readMore(
 			//if (res == AVERROR_INVALIDDATA) {
 			//	return ReadResult::NotYet; // try to skip bad packet
 			//}
-			return ReadResult::Error;
+			return ReadError::Other;
 		}
 	}
 	av_packet_unref(&_packet);
-	return ReadResult::Ok;
+	return bytes::const_span();
 }
 
 FFMpegLoader::~FFMpegLoader() {

View file

@@ -96,6 +96,10 @@ public:
 		return _swrDstRate;
 	}
 
+	int sampleSize() override {
+		return _outputSampleSize;
+	}
+
 	int format() override {
 		return _outputFormat;
 	}
@@ -107,35 +111,20 @@ protected:
 		not_null<AVCodecContext *> context,
 		int64 initialCount,
 		int initialFrequency);
 
-	ReadResult readFromReadyContext(
-		not_null<AVCodecContext *> context,
-		QByteArray &result,
-		int64 &samplesAdded);
+	[[nodiscard]] ReadResult readFromReadyContext(
+		not_null<AVCodecContext*> context);
 
 	// Streaming player provides the first frame to the ChildFFMpegLoader
 	// so we replace our allocated frame with the one provided.
-	ReadResult replaceFrameAndRead(
-		FFmpeg::FramePointer frame,
-		QByteArray &result,
-		int64 &samplesAdded);
-
-	int sampleSize() const {
-		return _outputSampleSize;
-	}
+	[[nodiscard]] ReadResult replaceFrameAndRead(FFmpeg::FramePointer frame);
 
 private:
-	ReadResult readFromReadyFrame(QByteArray &result, int64 &samplesAdded);
+	[[nodiscard]] ReadResult readFromReadyFrame();
 	bool frameHasDesiredFormat() const;
 	bool initResampleForFrame();
 	bool initResampleUsingFormat();
 	bool ensureResampleSpaceAvailable(int samples);
-	void appendSamples(
-		QByteArray &result,
-		int64 &samplesAdded,
-		uint8_t **data,
-		int count) const;
 
 	FFmpeg::FramePointer _frame;
 	int _outputFormat = AL_FORMAT_STEREO16;
 	int _outputChannels = 2;
@@ -171,7 +160,7 @@ public:
 	bool open(crl::time positionMs) override;
-	ReadResult readMore(QByteArray &result, int64 &samplesAdded) override;
+	ReadResult readMore() override;
 
 	~FFMpegLoader();

View file

@@ -31,27 +31,20 @@ bool AudioPlayerLoader::check(
 	return (this->_file == file) && (this->_data.size() == data.size());
 }
 
-void AudioPlayerLoader::saveDecodedSamples(
-	not_null<QByteArray*> samples,
-	not_null<int64*> samplesCount) {
-	Expects(_savedSamplesCount == 0);
+void AudioPlayerLoader::saveDecodedSamples(not_null<QByteArray*> samples) {
 	Expects(_savedSamples.isEmpty());
 	Expects(!_holdsSavedSamples);
 
 	samples->swap(_savedSamples);
-	std::swap(*samplesCount, _savedSamplesCount);
 	_holdsSavedSamples = true;
 }
 
 void AudioPlayerLoader::takeSavedDecodedSamples(
-	not_null<QByteArray*> samples,
-	not_null<int64*> samplesCount) {
-	Expects(*samplesCount == 0);
+	not_null<QByteArray*> samples) {
 	Expects(samples->isEmpty());
 	Expects(_holdsSavedSamples);
 
 	samples->swap(_savedSamples);
-	std::swap(*samplesCount, _savedSamplesCount);
 	_holdsSavedSamples = false;
 }

View file

@@ -26,18 +26,18 @@ public:
 	virtual bool open(crl::time positionMs) = 0;
 	virtual int64 samplesCount() = 0;
 	virtual int samplesFrequency() = 0;
+	virtual int sampleSize() = 0;
 	virtual int format() = 0;
 
-	enum class ReadResult {
-		Error,
+	enum class ReadError {
+		Other,
 		NotYet,
-		Ok,
 		Wait,
 		EndOfFile,
 	};
-	virtual ReadResult readMore(
-		QByteArray &samples,
-		int64 &samplesCount) = 0;
+	using ReadResult = std::variant<bytes::const_span, ReadError>;
+	[[nodiscard]] virtual ReadResult readMore() = 0;
 	virtual void enqueuePackets(std::deque<FFmpeg::Packet> &&packets) {
 		Unexpected("enqueuePackets() call on not ChildFFMpegLoader.");
 	}
@@ -48,12 +48,8 @@ public:
 		return false;
 	}
 
-	void saveDecodedSamples(
-		not_null<QByteArray*> samples,
-		not_null<int64*> samplesCount);
-	void takeSavedDecodedSamples(
-		not_null<QByteArray*> samples,
-		not_null<int64*> samplesCount);
+	void saveDecodedSamples(not_null<QByteArray*> samples);
+	void takeSavedDecodedSamples(not_null<QByteArray*> samples);
 	bool holdsSavedDecodedSamples() const;
 
 protected:
@@ -69,7 +65,6 @@ protected:
 private:
 	QByteArray _savedSamples;
-	int64 _savedSamplesCount = 0;
 	bool _holdsSavedSamples = false;
 
 };
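
For implementers of the interface above, the old enumerators map onto the new return values roughly as follows (an interpretation drawn from the call sites changed in this commit, not wording from the commit itself): Ok becomes returning a bytes::const_span, possibly empty when a packet was consumed without producing samples; Error becomes ReadError::Other; NotYet, Wait and EndOfFile keep their meanings as ReadError values. A hypothetical minimal loader might look like the sketch below; StubLoader, decodeNextChunk(), _finished and _decoded are placeholders, not names from the repository.

```cpp
// Hypothetical loader implementation under the new interface (sketch only).
// _decoded is assumed to be a bytes::vector member filled by decodeNextChunk().
auto StubLoader::readMore() -> ReadResult {
	if (_finished) {
		return ReadError::EndOfFile;
	} else if (!decodeNextChunk()) { // placeholder for the real decoding work
		return ReadError::Wait; // nothing available right now, try again later
	}
	// Success: hand the decoded bytes back; an empty span is a valid
	// "made progress, nothing to append" result.
	return bytes::const_span(_decoded.data(), _decoded.size());
}
```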

View file

@@ -166,15 +166,25 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
 	auto waiting = false;
 	auto errAtStart = started;
 
-	QByteArray samples;
-	int64 samplesCount = 0;
+	auto accumulated = QByteArray();
+	auto accumulatedCount = 0;
 	if (l->holdsSavedDecodedSamples()) {
-		l->takeSavedDecodedSamples(&samples, &samplesCount);
+		l->takeSavedDecodedSamples(&accumulated);
+		accumulatedCount = accumulated.size() / l->sampleSize();
 	}
-	while (samples.size() < kPlaybackBufferSize) {
-		auto res = l->readMore(samples, samplesCount);
-		using Result = AudioPlayerLoader::ReadResult;
-		if (res == Result::Error) {
+	while (accumulated.size() < kPlaybackBufferSize) {
+		const auto result = l->readMore();
+		const auto sampleBytes = v::is<bytes::const_span>(result)
+			? v::get<bytes::const_span>(result)
+			: bytes::const_span();
+		if (!sampleBytes.empty()) {
+			accumulated.append(
+				reinterpret_cast<const char*>(sampleBytes.data()),
+				sampleBytes.size());
+			accumulatedCount += sampleBytes.size() / l->sampleSize();
+		}
+		using Error = AudioPlayerLoader::ReadError;
+		if (result == Error::Other) {
 			if (errAtStart) {
 				{
 					QMutexLocker lock(internal::audioPlayerMutex());
@@ -187,18 +197,18 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
 			}
 			finished = true;
 			break;
-		} else if (res == Result::EndOfFile) {
+		} else if (result == Error::EndOfFile) {
 			finished = true;
 			break;
-		} else if (res == Result::Ok) {
-			errAtStart = false;
-		} else if (res == Result::Wait) {
-			waiting = (samples.size() < kPlaybackBufferSize)
-				&& (!samplesCount || !l->forceToBuffer());
+		} else if (result == Error::Wait) {
+			waiting = (accumulated.size() < kPlaybackBufferSize)
+				&& (accumulated.isEmpty() || !l->forceToBuffer());
 			if (waiting) {
-				l->saveDecodedSamples(&samples, &samplesCount);
+				l->saveDecodedSamples(&accumulated);
 			}
 			break;
+		} else if (v::is<bytes::const_span>(result)) {
+			errAtStart = false;
 		}
 
 		QMutexLocker lock(internal::audioPlayerMutex());
@@ -215,7 +225,7 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
 		return;
 	}
 
-	if (started || samplesCount) {
+	if (started || !accumulated.isEmpty()) {
 		Audio::AttachToDevice();
 	}
 	if (started) {
@@ -234,7 +244,7 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
 		track->state.position = position;
 		track->fadeStartPosition = position;
 	}
-	if (samplesCount) {
+	if (!accumulated.isEmpty()) {
 		track->ensureStreamCreated(type);
 
 		auto bufferIndex = track->getNotQueuedBufferIndex();
@@ -246,18 +256,26 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
 		}
 
 		if (bufferIndex < 0) { // No free buffers, wait.
-			l->saveDecodedSamples(&samples, &samplesCount);
+			l->saveDecodedSamples(&accumulated);
 			return;
 		} else if (l->forceToBuffer()) {
 			l->setForceToBuffer(false);
 		}
 
-		track->bufferSamples[bufferIndex] = samples;
-		track->samplesCount[bufferIndex] = samplesCount;
-		track->bufferedLength += samplesCount;
-		alBufferData(track->stream.buffers[bufferIndex], track->format, samples.constData(), samples.size(), track->frequency);
-
-		alSourceQueueBuffers(track->stream.source, 1, track->stream.buffers + bufferIndex);
+		track->bufferSamples[bufferIndex] = accumulated;
+		track->samplesCount[bufferIndex] = accumulatedCount;
+		track->bufferedLength += accumulatedCount;
+		alBufferData(
+			track->stream.buffers[bufferIndex],
+			track->format,
+			accumulated.constData(),
+			accumulated.size(),
+			track->frequency);
+
+		alSourceQueueBuffers(
+			track->stream.source,
+			1,
+			track->stream.buffers + bufferIndex);
 		if (!internal::audioCheckError()) {
 			setStoppedState(track, State::StoppedAtError);
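
Since readMore() now reports decoded data in bytes rather than in samples, the accumulated sample count above is recovered by dividing the byte count by sampleSize(), which appears to be the per-frame size across all channels. A worked example with assumed values (not figures from the commit): for AL_FORMAT_STEREO16 output, sampleSize() would be 2 channels x 2 bytes = 4, so a 16384-byte chunk corresponds to 4096 sample frames.

```cpp
// Illustrative arithmetic only; the concrete numbers are assumptions.
#include <cstddef>

constexpr auto kAssumedSampleSize = std::size_t(2 * 2); // 2 channels x 16 bit
constexpr auto kAppendedBytes = std::size_t(16384);     // one decoded chunk
static_assert(kAppendedBytes / kAssumedSampleSize == 4096, "frames per chunk");
```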

View file

@@ -77,12 +77,13 @@ void Track::fillFromData(bytes::vector &&data) {
 		}
 	};
 	do {
-		auto buffer = QByteArray();
-		auto samplesAdded = int64(0);
-		auto result = loader.readMore(buffer, samplesAdded);
-		if (samplesAdded > 0) {
-			auto sampleBytes = bytes::make_span(buffer);
-			_samplesCount += samplesAdded;
+		using Error = AudioPlayerLoader::ReadError;
+		const auto result = loader.readMore();
+		const auto sampleBytes = v::is<bytes::const_span>(result)
+			? v::get<bytes::const_span>(result)
+			: bytes::const_span();
+		if (!sampleBytes.empty()) {
+			_samplesCount += sampleBytes.size() / loader.sampleSize();
 			_samples.insert(_samples.end(), sampleBytes.data(), sampleBytes.data() + sampleBytes.size());
 			if (peaksCount) {
 				if (format == AL_FORMAT_MONO8 || format == AL_FORMAT_STEREO8) {
@@ -91,17 +92,12 @@ void Track::fillFromData(bytes::vector &&data) {
 					Media::Audio::IterateSamples<int16>(sampleBytes, peakCallback);
 				}
 			}
-		}
-
-		using Result = AudioPlayerLoader::ReadResult;
-		switch (result) {
-		case Result::Error:
-		case Result::NotYet:
-		case Result::Wait: {
+		} else if (result == Error::Other
+			|| result == Error::NotYet
+			|| result == Error::Wait) {
 			_failed = true;
-		} break;
 		}
-		if (result != Result::Ok) {
+		if (!v::is<bytes::const_span>(result)) {
 			break;
 		}
 	} while (true);

View file

@@ -29,38 +29,27 @@ bool ChildFFMpegLoader::open(crl::time positionMs) {
 		_parentData->frequency);
 }
 
-AudioPlayerLoader::ReadResult ChildFFMpegLoader::readFromInitialFrame(
-	QByteArray &result,
-	int64 &samplesAdded) {
+AudioPlayerLoader::ReadResult ChildFFMpegLoader::readFromInitialFrame() {
 	if (!_parentData->frame) {
-		return ReadResult::Wait;
+		return ReadError::Wait;
 	}
-	return replaceFrameAndRead(
-		base::take(_parentData->frame),
-		result,
-		samplesAdded);
+	return replaceFrameAndRead(base::take(_parentData->frame));
 }
 
-AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
-	QByteArray &result,
-	int64 &samplesAdded) {
-	const auto initialFrameResult = readFromInitialFrame(
-		result,
-		samplesAdded);
-	if (initialFrameResult != ReadResult::Wait) {
+auto ChildFFMpegLoader::readMore() -> ReadResult {
+	const auto initialFrameResult = readFromInitialFrame();
+	if (initialFrameResult != ReadError::Wait) {
 		return initialFrameResult;
 	}
 	const auto readResult = readFromReadyContext(
-		_parentData->codec.get(),
-		result,
-		samplesAdded);
-	if (readResult != ReadResult::Wait) {
+		_parentData->codec.get());
+	if (readResult != ReadError::Wait) {
 		return readResult;
 	}
 
 	if (_queue.empty()) {
-		return _eofReached ? ReadResult::EndOfFile : ReadResult::Wait;
+		return _eofReached ? ReadError::EndOfFile : ReadError::Wait;
 	}
 
 	auto packet = std::move(_queue.front());
@@ -69,7 +58,7 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
 	_eofReached = packet.empty();
 	if (_eofReached) {
 		avcodec_send_packet(_parentData->codec.get(), nullptr); // drain
-		return ReadResult::Ok;
+		return bytes::const_span();
 	}
 
 	auto res = avcodec_send_packet(
@@ -86,11 +75,11 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
 		// There is a sample voice message where skipping such packet
 		// results in a crash (read_access to nullptr) in swr_convert().
 		if (res == AVERROR_INVALIDDATA) {
-			return ReadResult::NotYet; // try to skip bad packet
+			return ReadError::NotYet; // try to skip bad packet
 		}
-		return ReadResult::Error;
+		return ReadError::Other;
 	}
-	return ReadResult::Ok;
+	return bytes::const_span();
 }
 
 void ChildFFMpegLoader::enqueuePackets(

View file

@@ -35,7 +35,7 @@ public:
 		return true;
 	}
 
-	ReadResult readMore(QByteArray &result, int64 &samplesAdded) override;
+	ReadResult readMore() override;
 	void enqueuePackets(std::deque<FFmpeg::Packet> &&packets) override;
 	void setForceToBuffer(bool force) override;
 	bool forceToBuffer() const override;
@@ -50,9 +50,7 @@ private:
 	// Streaming player reads first frame by itself and provides it together
 	// with the codec context. So we first read data from this frame and
 	// only after that we try to read next packets.
-	ReadResult readFromInitialFrame(
-		QByteArray &result,
-		int64 &samplesAdded);
+	ReadResult readFromInitialFrame();
 
 	std::unique_ptr<ExternalSoundData> _parentData;
 	std::deque<FFmpeg::Packet> _queue;