Play streaming audio in player.

John Preston 2019-03-01 01:03:25 +04:00
parent f1e0cd6c1d
commit fde8dd9607
44 changed files with 998 additions and 1445 deletions

View File

@@ -209,7 +209,9 @@ void Application::showPhoto(
}
void Application::showDocument(not_null<DocumentData*> document, HistoryItem *item) {
- if (cUseExternalVideoPlayer() && document->isVideoFile()) {
+ if (cUseExternalVideoPlayer()
+ && document->isVideoFile()
+ && document->loaded()) {
QDesktopServices::openUrl(QUrl("file:///" + document->location(false).fname));
} else {
_mediaView->showDocument(document, item);
@@ -738,6 +740,11 @@ void Application::authSessionDestroy() {
if (_authSession) {
unlockTerms();
_mtproto->clearGlobalHandlers();
+ // Must be called before Auth().data() is destroyed,
+ // because streaming media holds pointers to it.
+ Media::Player::instance()->handleLogout();
_authSession = nullptr;
authSessionChanged().notify(true);
Notify::unreadCounterUpdated();

View File

@@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/media_active_cache.h"
#include "core/mime_type.h"
#include "media/audio/media_audio.h"
+ #include "media/player/media_player_instance.h"
#include "storage/localstorage.h"
#include "platform/platform_specific.h"
#include "history/history.h"
@@ -27,9 +28,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/image/image_source.h"
#include "mainwindow.h"
#include "core/application.h"
- // #TODO streaming ui
- #include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_loader_mtproto.h"
namespace {
@@ -294,52 +292,26 @@ void DocumentOpenClickHandler::Open(
ActionOnLoad action) {
if (!data->date) return;
- auto msgId = context ? context->fullId() : FullMsgId();
- bool playVoice = data->isVoiceMessage();
- bool playMusic = data->isAudioFile();
- bool playVideo = data->isVideoFile();
- bool playAnimation = data->isAnimation();
- auto &location = data->location(true);
- if (data->isTheme()) {
- if (!location.isEmpty() && location.accessEnable()) {
- Core::App().showDocument(data, context);
- location.accessDisable();
- return;
- }
- }
- if (data->canBePlayed()) {
+ const auto msgId = context ? context->fullId() : FullMsgId();
+ const auto playVoice = data->isVoiceMessage();
+ const auto playAnimation = data->isAnimation();
+ const auto &location = data->location(true);
+ if (data->isTheme() && !location.isEmpty() && location.accessEnable()) {
Core::App().showDocument(data, context);
+ location.accessDisable();
+ return;
+ } else if (data->canBePlayed()) {
+ if (data->isAudioFile()) {
+ Media::Player::instance()->playPause({ data, msgId });
+ } else {
+ Core::App().showDocument(data, context);
+ }
return;
}
- if (!location.isEmpty() || (!data->data().isEmpty() && (playVoice || playMusic || playVideo || playAnimation))) {
+ if (!location.isEmpty() || (!data->data().isEmpty() && (playVoice || playAnimation))) {
using State = Media::Player::State;
if (playVoice) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
- if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
- if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
- Media::Player::mixer()->resume(state.id);
- } else {
- Media::Player::mixer()->pause(state.id);
- }
- } else {
- auto audio = AudioMsgId(data, msgId);
- Media::Player::mixer()->play(audio);
- Media::Player::Updated().notify(audio);
- data->owner().markMediaRead(data);
- }
- } else if (playMusic) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
- if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
- if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
- Media::Player::mixer()->resume(state.id);
- } else {
- Media::Player::mixer()->pause(state.id);
- }
- } else {
- auto song = AudioMsgId(data, msgId);
- Media::Player::mixer()->play(song);
- Media::Player::Updated().notify(song);
- }
+ Media::Player::instance()->playPause({ data, msgId });
} else if (data->size < App::kImageSizeLimit) {
if (!data->data().isEmpty() && playAnimation) {
if (action == ActionOnLoadPlayInline && context) {
@@ -750,35 +722,8 @@ void DocumentData::performActionOnLoad() {
}
}
using State = Media::Player::State;
- if (playVoice) {
- if (loaded()) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
- if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
- if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
- Media::Player::mixer()->resume(state.id);
- } else {
- Media::Player::mixer()->pause(state.id);
- }
- } else if (Media::Player::IsStopped(state.state)) {
- Media::Player::mixer()->play(AudioMsgId(this, _actionOnLoadMsgId));
- _owner->markMediaRead(this);
- }
- }
- } else if (playMusic) {
- if (loaded()) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
- if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
- if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
- Media::Player::mixer()->resume(state.id);
- } else {
- Media::Player::mixer()->pause(state.id);
- }
- } else if (Media::Player::IsStopped(state.state)) {
- auto song = AudioMsgId(this, _actionOnLoadMsgId);
- Media::Player::mixer()->play(song);
- Media::Player::Updated().notify(song);
- }
- }
+ if (playVoice || playMusic) {
+ DocumentOpenClickHandler::Open({}, this, item, ActionOnLoadNone);
} else if (playAnimation) {
if (loaded()) {
if (_actionOnLoad == ActionOnLoadPlayInline && item) {

View File

@@ -173,9 +173,14 @@ bool ReplyPreview::empty() const {
} // namespace Data
+ uint32 AudioMsgId::CreateExternalPlayId() {
+ static auto Result = uint32(0);
+ return ++Result ? Result : ++Result;
+ }
AudioMsgId AudioMsgId::ForVideo() {
auto result = AudioMsgId();
- result._playId = rand_value<uint32>();
+ result._externalPlayId = CreateExternalPlayId();
result._type = Type::Video;
return result;
}
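The CreateExternalPlayId() helper added above hands out monotonically increasing identifiers that are never zero, so a zero externalPlayId can keep meaning "no external playback" in AudioMsgId. A minimal standalone sketch of that pattern (the names below are illustrative, not part of the commit):

#include <cassert>
#include <cstdint>

// A counter that never yields zero: if the increment wraps around to 0,
// it is bumped once more, so 0 stays reserved as "no id".
uint32_t NextNonZeroId() {
    static auto counter = uint32_t(0);
    return ++counter ? counter : ++counter;
}

int main() {
    assert(NextNonZeroId() == 1);
    assert(NextNonZeroId() == 2);
    // After a full wrap-around past UINT32_MAX it skips 0 and yields 1 again.
}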

View File

@@ -363,12 +363,14 @@ public:
AudioMsgId(
DocumentData *audio,
const FullMsgId &msgId,
- uint32 playId = 0)
+ uint32 externalPlayId = 0)
: _audio(audio)
, _contextId(msgId)
- , _playId(playId) {
+ , _externalPlayId(externalPlayId) {
setTypeFromAudio();
}
+ [[nodiscard]] static uint32 CreateExternalPlayId();
[[nodiscard]] static AudioMsgId ForVideo();
Type type() const {
@@ -380,8 +382,8 @@ public:
FullMsgId contextId() const {
return _contextId;
}
- uint32 playId() const {
- return _playId;
+ uint32 externalPlayId() const {
+ return _externalPlayId;
}
explicit operator bool() const {
@@ -394,7 +396,7 @@ private:
DocumentData *_audio = nullptr;
Type _type = Type::Unknown;
FullMsgId _contextId;
- uint32 _playId = 0;
+ uint32 _externalPlayId = 0;
};
@@ -408,13 +410,13 @@ inline bool operator<(const AudioMsgId &a, const AudioMsgId &b) {
} else if (b.contextId() < a.contextId()) {
return false;
}
- return (a.playId() < b.playId());
+ return (a.externalPlayId() < b.externalPlayId());
}
inline bool operator==(const AudioMsgId &a, const AudioMsgId &b) {
return (a.audio() == b.audio())
&& (a.contextId() == b.contextId())
- && (a.playId() == b.playId());
+ && (a.externalPlayId() == b.externalPlayId());
}
inline bool operator!=(const AudioMsgId &a, const AudioMsgId &b) {
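Since externalPlayId() now takes part in operator< and operator==, two ids that refer to the same document and message but belong to different external (streaming) playback sessions are distinct keys. An illustrative usage sketch, assuming a DocumentData pointer doc and a FullMsgId msgId are already at hand (both are placeholders, not values from the commit):

// Each CreateExternalPlayId() call returns a fresh non-zero id, so the two
// ids below compare unequal even though document and context are the same.
const auto first = AudioMsgId(doc, msgId, AudioMsgId::CreateExternalPlayId());
const auto second = AudioMsgId(doc, msgId, AudioMsgId::CreateExternalPlayId());
// first != second, and exactly one of (first < second) / (second < first)
// holds, so both sessions can coexist in containers keyed by AudioMsgId.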

View File

@@ -855,5 +855,5 @@ void HistoryDocumentVoice::startSeeking() {
void HistoryDocumentVoice::stopSeeking() {
_seeking = false;
- Media::Player::instance()->stopSeeking(AudioMsgId::Type::Voice);
+ Media::Player::instance()->cancelSeeking(AudioMsgId::Type::Voice);
}

View File

@@ -252,12 +252,8 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
p.setPen(Qt::NoPen);
if (selected) {
p.setBrush(st::msgDateImgBgSelected);
- } else if (isThumbAnimation(ms)) {
- auto over = _animation->a_thumbOver.current();
- p.setBrush(anim::brush(st::msgDateImgBg, st::msgDateImgBgOver, over));
} else {
- auto over = ClickHandler::showAsActive(_data->loading() ? _cancell : _savel);
- p.setBrush(over ? st::msgDateImgBgOver : st::msgDateImgBg);
+ p.setBrush(st::msgDateImgBg);
}
p.setOpacity(radialOpacity * p.opacity());
@@ -303,12 +299,8 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
p.setPen(Qt::NoPen);
if (selected) {
p.setBrush(outbg ? st::msgFileOutBgSelected : st::msgFileInBgSelected);
- } else if (isThumbAnimation(ms)) {
- auto over = _animation->a_thumbOver.current();
- p.setBrush(anim::brush(outbg ? st::msgFileOutBg : st::msgFileInBg, outbg ? st::msgFileOutBgOver : st::msgFileInBgOver, over));
} else {
- auto over = ClickHandler::showAsActive(_data->loading() ? _cancell : _savel);
- p.setBrush(outbg ? (over ? st::msgFileOutBgOver : st::msgFileOutBg) : (over ? st::msgFileInBgOver : st::msgFileInBg));
+ p.setBrush(outbg ? st::msgFileOutBg : st::msgFileInBg);
}
{
@@ -322,13 +314,13 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
_animation->radial.draw(p, rinner, st::msgFileRadialLine, fg);
}
- auto icon = ([showPause, radial, this, loaded, outbg, selected] {
+ auto icon = [&] {
if (showPause) {
return &(outbg ? (selected ? st::historyFileOutPauseSelected : st::historyFileOutPause) : (selected ? st::historyFileInPauseSelected : st::historyFileInPause));
} else if (radial || _data->loading()) {
return &(outbg ? (selected ? st::historyFileOutCancelSelected : st::historyFileOutCancel) : (selected ? st::historyFileInCancelSelected : st::historyFileInCancel));
- } else if (loaded) {
- if (_data->isAudioFile() || _data->isVoiceMessage()) {
+ } else if (loaded || _data->canBePlayed()) {
+ if (_data->canBePlayed()) {
return &(outbg ? (selected ? st::historyFileOutPlaySelected : st::historyFileOutPlay) : (selected ? st::historyFileInPlaySelected : st::historyFileInPlay));
} else if (_data->isImage()) {
return &(outbg ? (selected ? st::historyFileOutImageSelected : st::historyFileOutImage) : (selected ? st::historyFileInImageSelected : st::historyFileInImage));
@@ -336,7 +328,7 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
return &(outbg ? (selected ? st::historyFileOutDocumentSelected : st::historyFileOutDocument) : (selected ? st::historyFileInDocumentSelected : st::historyFileInDocument));
}
return &(outbg ? (selected ? st::historyFileOutDownloadSelected : st::historyFileOutDownload) : (selected ? st::historyFileInDownloadSelected : st::historyFileInDownload));
- })();
+ }();
icon->paintInCenter(p, inner);
}
auto namewidth = width() - nameleft - nameright;
@@ -465,20 +457,12 @@ TextState HistoryDocument::textState(QPoint point, StateRequest request) const {
auto nameleft = 0, nametop = 0, nameright = 0, statustop = 0, linktop = 0, bottom = 0;
auto topMinus = isBubbleTop() ? 0 : st::msgFileTopMinus;
- if (auto thumbed = Get<HistoryDocumentThumbed>()) {
+ if (const auto thumbed = Get<HistoryDocumentThumbed>()) {
nameleft = st::msgFileThumbPadding.left() + st::msgFileThumbSize + st::msgFileThumbPadding.right();
nameright = st::msgFileThumbPadding.left();
nametop = st::msgFileThumbNameTop - topMinus;
linktop = st::msgFileThumbLinkTop - topMinus;
bottom = st::msgFileThumbPadding.top() + st::msgFileThumbSize + st::msgFileThumbPadding.bottom() - topMinus;
- QRect rthumb(rtlrect(st::msgFileThumbPadding.left(), st::msgFileThumbPadding.top() - topMinus, st::msgFileThumbSize, st::msgFileThumbSize, width()));
- if ((_data->loading() || _data->uploading() || !loaded) && rthumb.contains(point)) {
- result.link = (_data->loading() || _data->uploading()) ? _cancell : _savel;
- return result;
- }
if (_data->status != FileUploadFailed) {
if (rtlrect(nameleft, linktop, thumbed->_linkw, st::semiboldFont->height, width()).contains(point)) {
result.link = (_data->loading() || _data->uploading())
@@ -487,25 +471,14 @@ TextState HistoryDocument::textState(QPoint point, StateRequest request) const {
return result;
}
}
- } else {
- nameleft = st::msgFilePadding.left() + st::msgFileSize + st::msgFilePadding.right();
- nameright = st::msgFilePadding.left();
- nametop = st::msgFileNameTop - topMinus;
- bottom = st::msgFilePadding.top() + st::msgFileSize + st::msgFilePadding.bottom() - topMinus;
- QRect inner(rtlrect(st::msgFilePadding.left(), st::msgFilePadding.top() - topMinus, st::msgFileSize, st::msgFileSize, width()));
- if ((_data->loading() || _data->uploading() || !loaded) && inner.contains(point)) {
- result.link = (_data->loading() || _data->uploading()) ? _cancell : _savel;
- return result;
- }
}
- if (auto voice = Get<HistoryDocumentVoice>()) {
+ if (const auto voice = Get<HistoryDocumentVoice>()) {
auto namewidth = width() - nameleft - nameright;
auto waveformbottom = st::msgFilePadding.top() - topMinus + st::msgWaveformMax + st::msgWaveformMin;
if (QRect(nameleft, nametop, namewidth, waveformbottom - nametop).contains(point)) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
- if (state.id == AudioMsgId(_data, _parent->data()->fullId())
+ const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Voice);
+ if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
if (!voice->seeking()) {
voice->setSeekingStart((point.x() - nameleft) / float64(namewidth));
@@ -532,7 +505,13 @@ TextState HistoryDocument::textState(QPoint point, StateRequest request) const {
}
}
if (QRect(0, 0, width(), painth).contains(point) && !_data->loading() && !_data->uploading() && !_data->isNull()) {
- result.link = _openl;
+ if (_data->loading() || _data->uploading()) {
+ result.link = _cancell;
+ } else if (loaded || _data->canBePlayed()) {
+ result.link = _openl;
+ } else {
+ result.link = _savel;
+ }
return result;
}
return result;
@@ -622,53 +601,55 @@ bool HistoryDocument::updateStatusText() const {
statusSize = _data->loadOffset();
} else if (_data->loaded()) {
statusSize = FileStatusSizeLoaded;
- if (_data->isVoiceMessage()) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
- if (state.id == AudioMsgId(_data, _parent->data()->fullId())
- && !Media::Player::IsStoppedOrStopping(state.state)) {
- if (auto voice = Get<HistoryDocumentVoice>()) {
- bool was = (voice->_playback != nullptr);
- voice->ensurePlayback(this);
- if (!was || state.position != voice->_playback->_position) {
- auto prg = state.length ? snap(float64(state.position) / state.length, 0., 1.) : 0.;
- if (voice->_playback->_position < state.position) {
- voice->_playback->a_progress.start(prg);
- } else {
- voice->_playback->a_progress = anim::value(0., prg);
- }
- voice->_playback->_position = state.position;
- voice->_playback->_a_progress.start();
- }
- voice->_lastDurationMs = static_cast<int>((state.length * 1000LL) / state.frequency); // Bad :(
- }
- statusSize = -1 - (state.position / state.frequency);
- realDuration = (state.length / state.frequency);
- showPause = Media::Player::ShowPauseIcon(state.state);
- } else {
- if (auto voice = Get<HistoryDocumentVoice>()) {
- voice->checkPlaybackFinished();
- }
- }
- if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId()))) {
- showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Voice);
- }
- } else if (_data->isAudioFile()) {
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
- if (state.id == AudioMsgId(_data, _parent->data()->fullId())
- && !Media::Player::IsStoppedOrStopping(state.state)) {
- statusSize = -1 - (state.position / state.frequency);
- realDuration = (state.length / state.frequency);
- showPause = Media::Player::ShowPauseIcon(state.state);
- } else {
- }
- if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId()))) {
- showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Song);
- }
- }
} else {
statusSize = FileStatusSizeReady;
}
+ if (_data->isVoiceMessage()) {
+ const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Voice);
+ if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId())
+ && !Media::Player::IsStoppedOrStopping(state.state)) {
+ if (auto voice = Get<HistoryDocumentVoice>()) {
+ bool was = (voice->_playback != nullptr);
+ voice->ensurePlayback(this);
+ if (!was || state.position != voice->_playback->_position) {
+ auto prg = state.length ? snap(float64(state.position) / state.length, 0., 1.) : 0.;
+ if (voice->_playback->_position < state.position) {
+ voice->_playback->a_progress.start(prg);
+ } else {
+ voice->_playback->a_progress = anim::value(0., prg);
+ }
+ voice->_playback->_position = state.position;
+ voice->_playback->_a_progress.start();
+ }
+ voice->_lastDurationMs = static_cast<int>((state.length * 1000LL) / state.frequency); // Bad :(
+ }
+ statusSize = -1 - (state.position / state.frequency);
+ realDuration = (state.length / state.frequency);
+ showPause = Media::Player::ShowPauseIcon(state.state);
+ } else {
+ if (auto voice = Get<HistoryDocumentVoice>()) {
+ voice->checkPlaybackFinished();
+ }
+ }
+ if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId()))) {
+ showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Voice);
+ }
+ } else if (_data->isAudioFile()) {
+ const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Song);
+ if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId())
+ && !Media::Player::IsStoppedOrStopping(state.state)) {
+ statusSize = -1 - (state.position / state.frequency);
+ realDuration = (state.length / state.frequency);
+ showPause = Media::Player::ShowPauseIcon(state.state);
+ } else {
+ }
+ if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId()))) {
+ showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Song);
+ }
+ }
if (statusSize != _statusSize) {
setStatusSize(statusSize, realDuration);
}
@@ -708,9 +689,9 @@ void HistoryDocument::clickHandlerPressedChanged(const ClickHandlerPtr &p, bool
if (pressed && p == voice->_seekl && !voice->seeking()) {
voice->startSeeking();
} else if (!pressed && voice->seeking()) {
- auto type = AudioMsgId::Type::Voice;
- auto state = Media::Player::mixer()->currentState(type);
- if (state.id == AudioMsgId(_data, _parent->data()->fullId()) && state.length) {
+ const auto type = AudioMsgId::Type::Voice;
+ const auto state = Media::Player::instance()->getState(type);
+ if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId()) && state.length) {
auto currentProgress = voice->seekingCurrent();
auto currentPosition = state.frequency
? qRound(currentProgress * state.length * 1000. / state.frequency)

View File

@@ -61,6 +61,12 @@ protected:
bool dataLoaded() const override;
private:
+ struct StateFromPlayback {
+ int statusSize = 0;
+ bool showPause = false;
+ int realDuration = 0;
+ };
QSize countOptimalSize() override;
QSize countCurrentSize(int newWidth) override;

View File

@@ -13,6 +13,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio.h"
#include "media/clip/media_clip_reader.h"
#include "media/player/media_player_round_controller.h"
+ #include "media/player/media_player_instance.h"
#include "media/view/media_view_playback_progress.h"
#include "boxes/confirm_box.h"
#include "history/history_item_components.h"
@@ -750,8 +751,8 @@ void HistoryGif::updateStatusText() const {
if (const auto video = activeRoundPlayer()) {
statusSize = -1 - _data->duration();
- const auto state = Media::Player::mixer()->currentState(
- AudioMsgId::Type::Voice);
+ const auto type = AudioMsgId::Type::Voice;
+ const auto state = Media::Player::instance()->getState(type);
if (state.id == video->audioMsgId() && state.length) {
auto position = int64(0);
if (Media::Player::IsStoppedAtEnd(state.state)) {

View File

@@ -894,31 +894,24 @@ bool File::updateStatusText() const {
} else if (_document->loading()) {
statusSize = _document->loadOffset();
} else if (_document->loaded()) {
- if (_document->isVoiceMessage()) {
- statusSize = FileStatusSizeLoaded;
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
- if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
- statusSize = -1 - (state.position / state.frequency);
- realDuration = (state.length / state.frequency);
- showPause = Media::Player::ShowPauseIcon(state.state);
- }
- } else if (_document->isAudioFile()) {
- statusSize = FileStatusSizeLoaded;
- auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
- if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
- statusSize = -1 - (state.position / state.frequency);
- realDuration = (state.length / state.frequency);
- showPause = Media::Player::ShowPauseIcon(state.state);
- }
- if (!showPause && (state.id == AudioMsgId(_document, FullMsgId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
- showPause = true;
- }
- } else {
- statusSize = FileStatusSizeLoaded;
- }
+ statusSize = FileStatusSizeLoaded;
} else {
statusSize = FileStatusSizeReady;
}
+ if (_document->isVoiceMessage() || _document->isAudioFile()) {
+ const auto type = _document->isVoiceMessage() ? AudioMsgId::Type::Voice : AudioMsgId::Type::Song;
+ const auto state = Media::Player::instance()->getState(type);
+ if (state.id == AudioMsgId(_document, FullMsgId(), state.id.externalPlayId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
+ statusSize = -1 - (state.position / state.frequency);
+ realDuration = (state.length / state.frequency);
+ showPause = Media::Player::ShowPauseIcon(state.state);
+ }
+ if (!showPause && (state.id == AudioMsgId(_document, FullMsgId(), state.id.externalPlayId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
+ showPause = true;
+ }
+ }
if (statusSize != _statusSize) {
int32 duration = _document->isSong()
? _document->song()->duration

View File

@@ -366,11 +366,7 @@ MainWidget::MainWidget(
, _sideShadow(this)
, _dialogs(this, _controller)
, _history(this, _controller)
- , _playerPlaylist(
- this,
- _controller,
- Media::Player::Panel::Layout::OnlyPlaylist)
- , _playerPanel(this, _controller, Media::Player::Panel::Layout::Full)
+ , _playerPlaylist(this, _controller)
, _noUpdatesTimer([=] { sendPing(); })
, _byPtsTimer([=] { getDifferenceByPts(); })
, _bySeqTimer([=] { getDifference(); })
@@ -447,15 +443,6 @@ MainWidget::MainWidget(
}
});
- _playerPanel->setPinCallback([this] { switchToFixedPlayer(); });
- _playerPanel->setCloseCallback([this] { closeBothPlayers(); });
- subscribe(Media::Player::instance()->titleButtonOver(), [this](bool over) {
- if (over) {
- _playerPanel->showFromOther();
- } else {
- _playerPanel->hideFromOther();
- }
- });
subscribe(Media::Player::instance()->playerWidgetOver(), [this](bool over) {
if (over) {
if (_playerPlaylist->isHidden()) {
@@ -471,7 +458,7 @@ MainWidget::MainWidget(
});
subscribe(Media::Player::instance()->tracksFinishedNotifier(), [this](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Voice) {
- auto songState = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
+ const auto songState = Media::Player::instance()->getState(AudioMsgId::Type::Song);
if (!songState.id || IsStoppedOrStopping(songState.state)) {
closeBothPlayers();
}
@@ -1189,7 +1176,7 @@ void MainWidget::messagesAffected(
void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
using State = Media::Player::State;
const auto document = audioId.audio();
- auto state = Media::Player::mixer()->currentState(audioId.type());
+ auto state = Media::Player::instance()->getState(audioId.type());
if (state.id == audioId && state.state == State::StoppedAtStart) {
state.state = State::Stopped;
Media::Player::mixer()->clearStoppedAtStart(audioId);
@@ -1220,47 +1207,12 @@ void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
}
}
- void MainWidget::switchToPanelPlayer() {
- if (_playerUsingPanel) return;
- _playerUsingPanel = true;
- _player->hide(anim::type::normal);
- _playerVolume.destroyDelayed();
- _playerPlaylist->hideIgnoringEnterEvents();
- Media::Player::instance()->usePanelPlayer().notify(true, true);
- }
- void MainWidget::switchToFixedPlayer() {
- if (!_playerUsingPanel) return;
- _playerUsingPanel = false;
- if (!_player) {
- createPlayer();
- } else {
- _player->show(anim::type::normal);
- if (!_playerVolume) {
- _playerVolume.create(this);
- _player->entity()->volumeWidgetCreated(_playerVolume);
- updateMediaPlayerPosition();
- }
- }
- Media::Player::instance()->usePanelPlayer().notify(false, true);
- _playerPanel->hideIgnoringEnterEvents();
- }
void MainWidget::closeBothPlayers() {
- if (_playerUsingPanel) {
- _playerUsingPanel = false;
- _player.destroyDelayed();
- } else if (_player) {
+ if (_player) {
_player->hide(anim::type::normal);
}
_playerVolume.destroyDelayed();
- Media::Player::instance()->usePanelPlayer().notify(false, true);
- _playerPanel->hideIgnoringEnterEvents();
_playerPlaylist->hideIgnoringEnterEvents();
Media::Player::instance()->stop(AudioMsgId::Type::Voice);
Media::Player::instance()->stop(AudioMsgId::Type::Song);
@@ -1269,9 +1221,6 @@ void MainWidget::closeBothPlayers() {
}
void MainWidget::createPlayer() {
- if (_playerUsingPanel) {
- return;
- }
if (!_player) {
_player.create(this, object_ptr<Media::Player::Widget>(this));
rpl::merge(
@@ -1314,7 +1263,7 @@ void MainWidget::playerHeightUpdated() {
updateControlsGeometry();
}
if (!_playerHeight && _player->isHidden()) {
- auto state = Media::Player::mixer()->currentState(Media::Player::instance()->getActiveType());
+ const auto state = Media::Player::instance()->getState(Media::Player::instance()->getActiveType());
if (!state.id || Media::Player::IsStoppedOrStopping(state.state)) {
_playerVolume.destroyDelayed();
_player.destroyDelayed();
@@ -2042,10 +1991,6 @@ Window::SectionSlideParams MainWidget::prepareShowAnimation(
if (playerVolumeVisible) {
_playerVolume->hide();
}
- auto playerPanelVisible = !_playerPanel->isHidden();
- if (playerPanelVisible) {
- _playerPanel->hide();
- }
auto playerPlaylistVisible = !_playerPlaylist->isHidden();
if (playerPlaylistVisible) {
_playerPlaylist->hide();
@@ -2073,9 +2018,6 @@
if (playerVolumeVisible) {
_playerVolume->show();
}
- if (playerPanelVisible) {
- _playerPanel->show();
- }
if (playerPlaylistVisible) {
_playerPlaylist->show();
}
@@ -2330,7 +2272,6 @@ void MainWidget::orderWidgets() {
}
_connecting->raise();
_playerPlaylist->raise();
- _playerPanel->raise();
floatPlayerRaiseAll();
if (_hider) _hider->raise();
}
@@ -2352,10 +2293,6 @@ QPixmap MainWidget::grabForShowAnimation(const Window::SectionSlideParams &param
if (playerVolumeVisible) {
_playerVolume->hide();
}
- auto playerPanelVisible = !_playerPanel->isHidden();
- if (playerPanelVisible) {
- _playerPanel->hide();
- }
auto playerPlaylistVisible = !_playerPlaylist->isHidden();
if (playerPlaylistVisible) {
_playerPlaylist->hide();
@@ -2386,9 +2323,6 @@
if (playerVolumeVisible) {
_playerVolume->show();
}
- if (playerPanelVisible) {
- _playerPanel->show();
- }
if (playerPlaylistVisible) {
_playerPlaylist->show();
}
@@ -2890,7 +2824,6 @@ void MainWidget::updateThirdColumnToCurrentChat(
}
void MainWidget::updateMediaPlayerPosition() {
- _playerPanel->moveToRight(0, 0);
if (_player && _playerVolume) {
auto relativePosition = _player->entity()->getPositionForVolumeWidget();
auto playerMargins = _playerVolume->getMargin();
@@ -3524,7 +3457,6 @@ void MainWidget::openPeerByName(
bool MainWidget::contentOverlapped(const QRect &globalRect) {
return (_history->contentOverlapped(globalRect)
- || _playerPanel->overlaps(globalRect)
|| _playerPlaylist->overlaps(globalRect)
|| (_playerVolume && _playerVolume->overlaps(globalRect)));
}

View File

@@ -365,8 +365,6 @@ private:
void setupConnectingWidget();
void createPlayer();
- void switchToPanelPlayer();
- void switchToFixedPlayer();
void closeBothPlayers();
void playerHeightUpdated();
@@ -501,7 +499,6 @@ private:
= { nullptr };
object_ptr<Media::Player::VolumeWidget> _playerVolume = { nullptr };
object_ptr<Media::Player::Panel> _playerPlaylist;
- object_ptr<Media::Player::Panel> _playerPanel;
bool _playerUsingPanel = false;
base::unique_qptr<Window::HistoryHider> _hider;

View File

@@ -422,7 +422,8 @@ void Mixer::Track::resetSpeedEffect() {
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::reattach(AudioMsgId::Type type) {
- if (isStreamCreated() || !samplesCount[0]) {
+ if (isStreamCreated()
+ || (!samplesCount[0] && !state.id.externalPlayId())) {
return;
}
@@ -440,11 +441,13 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
&& (state.state != State::PausedAtEnd)
&& !state.waitingForData) {
alSourcef(stream.source, AL_GAIN, ComputeVolume(type));
+ LOG(("alSourcePlay: reattach for %1").arg(state.id.externalPlayId()));
alSourcePlay(stream.source);
if (IsPaused(state.state)) {
// We must always start the source if we want the AL_SAMPLE_OFFSET to be applied.
// Otherwise it won't be read by alGetSource and we'll get a corrupt position.
// So in case of a paused source we start it and then immediately pause it.
+ LOG(("alSourcePause: reattach for %1").arg(state.id.externalPlayId()));
alSourcePause(stream.source);
}
}
@@ -475,7 +478,7 @@ void Mixer::Track::clear() {
bufferSamples[i] = QByteArray();
}
- setVideoData(nullptr);
+ setExternalData(nullptr);
lastUpdateWhen = 0;
lastUpdatePosition = 0;
}
@@ -553,11 +556,12 @@ int Mixer::Track::getNotQueuedBufferIndex() {
return -1;
}
- void Mixer::Track::setVideoData(std::unique_ptr<VideoSoundData> data) {
+ void Mixer::Track::setExternalData(
+ std::unique_ptr<ExternalSoundData> data) {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
changeSpeedEffect(data ? data->speed : 1.);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
- videoData = std::move(data);
+ externalData = std::move(data);
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
@@ -640,8 +644,8 @@ Mixer::~Mixer() {
}
void Mixer::onUpdated(const AudioMsgId &audio) {
- if (audio.playId()) {
- videoSoundProgress(audio);
+ if (audio.externalPlayId()) {
+ externalSoundProgress(audio);
}
Media::Player::Updated().notify(audio);
}
@@ -740,7 +744,9 @@ void Mixer::resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered
positionInBuffered = 0;
}
}
- auto fullPosition = track->bufferedPosition + positionInBuffered;
+ auto fullPosition = track->samplesCount[0]
+ ? (track->bufferedPosition + positionInBuffered)
+ : track->state.position;
track->state.position = fullPosition;
track->fadeStartPosition = fullPosition;
}
@@ -776,9 +782,9 @@ void Mixer::play(const AudioMsgId &audio, crl::time positionMs) {
void Mixer::play(
const AudioMsgId &audio,
- std::unique_ptr<VideoSoundData> videoData,
+ std::unique_ptr<ExternalSoundData> externalData,
crl::time positionMs) {
- Expects(!videoData || audio.playId() != 0);
+ Expects((externalData != nullptr) == (audio.externalPlayId() != 0));
auto type = audio.type();
AudioMsgId stopped;
@@ -839,24 +845,33 @@ void Mixer::play(
}
}
- current->state.id = audio;
- current->started();
+ if (current->state.id != audio) {
+ // Clear all previous state.
+ current->state.id = audio;
+ }
current->lastUpdateWhen = 0;
current->lastUpdatePosition = 0;
- if (videoData) {
- current->setVideoData(std::move(videoData));
+ if (externalData) {
+ current->setExternalData(std::move(externalData));
} else {
- current->setVideoData(nullptr);
+ current->setExternalData(nullptr);
current->file = audio.audio()->location(true);
current->data = audio.audio()->data();
notLoadedYet = (current->file.isEmpty() && current->data.isEmpty());
}
if (notLoadedYet) {
- auto newState = (type == AudioMsgId::Type::Song) ? State::Stopped : State::StoppedAtError;
+ auto newState = (type == AudioMsgId::Type::Song)
+ ? State::Stopped
+ : State::StoppedAtError;
setStoppedState(current, newState);
} else {
current->state.position = (positionMs * current->state.frequency)
/ 1000LL;
- current->state.state = current->videoData ? State::Paused : fadedStart ? State::Starting : State::Playing;
+ current->state.state = current->externalData
+ ? State::Paused
+ : fadedStart
+ ? State::Starting
+ : State::Playing;
current->loading = true;
emit loaderOnStart(current->state.id, positionMs);
if (type == AudioMsgId::Type::Voice) {
@@ -879,63 +894,60 @@ void Mixer::play(
}
}
- void Mixer::feedFromVideo(const VideoSoundPart &part) {
- _loader->feedFromVideo(part);
+ void Mixer::feedFromExternal(ExternalSoundPart &&part) {
+ _loader->feedFromExternal(std::move(part));
}
- void Mixer::forceToBufferVideo(const AudioMsgId &audioId) {
- _loader->forceToBufferVideo(audioId);
+ void Mixer::forceToBufferExternal(const AudioMsgId &audioId) {
+ _loader->forceToBufferExternal(audioId);
}
- void Mixer::setSpeedFromVideo(const AudioMsgId &audioId, float64 speed) {
+ void Mixer::setSpeedFromExternal(const AudioMsgId &audioId, float64 speed) {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
QMutexLocker lock(&AudioMutex);
- const auto track = trackForType(AudioMsgId::Type::Video);
+ const auto track = trackForType(audioId.type());
if (track->state.id == audioId) {
track->changeSpeedEffect(speed);
}
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
- Streaming::TimePoint Mixer::getVideoSyncTimePoint(
+ Streaming::TimePoint Mixer::getExternalSyncTimePoint(
const AudioMsgId &audio) const {
- Expects(audio.type() == AudioMsgId::Type::Video);
- Expects(audio.playId() != 0);
+ Expects(audio.externalPlayId() != 0);
auto result = Streaming::TimePoint();
- const auto playId = audio.playId();
+ const auto type = audio.type();
QMutexLocker lock(&AudioMutex);
- const auto track = trackForType(AudioMsgId::Type::Video);
- if (track->state.id.playId() == playId && track->lastUpdateWhen > 0) {
+ const auto track = trackForType(type);
+ if (track && track->state.id == audio && track->lastUpdateWhen > 0) {
result.trackTime = track->lastUpdatePosition;
result.worldTime = track->lastUpdateWhen;
}
return result;
}
- crl::time Mixer::getVideoCorrectedTime(const AudioMsgId &audio, crl::time frameMs, crl::time systemMs) {
+ crl::time Mixer::getExternalCorrectedTime(const AudioMsgId &audio, crl::time frameMs, crl::time systemMs) {
auto result = frameMs;
+ const auto type = audio.type();
QMutexLocker lock(&AudioMutex);
- auto type = audio.type();
- auto track = trackForType(type);
+ const auto track = trackForType(type);
if (track && track->state.id == audio && track->lastUpdateWhen > 0) {
result = static_cast<crl::time>(track->lastUpdatePosition);
if (systemMs > track->lastUpdateWhen) {
result += (systemMs - track->lastUpdateWhen);
}
}
return result;
}
- void Mixer::videoSoundProgress(const AudioMsgId &audio) {
- auto type = audio.type();
+ void Mixer::externalSoundProgress(const AudioMsgId &audio) {
+ const auto type = audio.type();
QMutexLocker lock(&AudioMutex);
- auto current = trackForType(type);
+ const auto current = trackForType(type);
if (current && current->state.length && current->state.frequency) {
if (current->state.id == audio && current->state.state == State::Playing) {
current->lastUpdateWhen = crl::now();
@@ -947,7 +959,7 @@ void Mixer::videoSoundProgress(const AudioMsgId &audio) {
bool Mixer::checkCurrentALError(AudioMsgId::Type type) {
if (!Audio::PlaybackErrorHappened()) return true;
- auto data = trackForType(type);
+ const auto data = trackForType(type);
if (!data) {
setStoppedState(data, State::StoppedAtError);
onError(data->state.id);
@@ -1044,6 +1056,7 @@ void Mixer::resume(const AudioMsgId &audio, bool fast) {
alSourcei(track->stream.source, AL_SAMPLE_OFFSET, qMax(track->state.position - track->bufferedPosition, 0LL));
if (!checkCurrentALError(type)) return;
}
+ LOG(("alSourcePlay: resume for: %1").arg(track->state.id.externalPlayId()));
alSourcePlay(track->stream.source);
if (!checkCurrentALError(type)) return;
}
@@ -1134,12 +1147,13 @@ void Mixer::stop(const AudioMsgId &audio) {
return;
}
- current = track->state.id;
+ current = audio;
fadedStop(type);
if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
} else if (type == AudioMsgId::Type::Video) {
track->clear();
+ emit loaderOnCancel(audio);
}
}
if (current) emit updated(current);
@@ -1153,11 +1167,13 @@ void Mixer::stop(const AudioMsgId &audio, State state) {
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
auto track = trackForType(type);
- if (!track || track->state.id != audio || IsStopped(track->state.state)) {
+ if (!track
+ || track->state.id != audio
+ || IsStopped(track->state.state)) {
return;
}
- current = track->state.id;
+ current = audio;
setStoppedState(track, state);
if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
@@ -1247,6 +1263,7 @@ void Mixer::setStoppedState(Track *current, State state) {
alSourceStop(current->stream.source);
alSourcef(current->stream.source, AL_GAIN, 1);
}
+ emit loaderOnCancel(current->state.id);
}
void Mixer::clearStoppedAtStart(const AudioMsgId &audio) {
@@ -1508,6 +1525,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
track->state.position = fullPosition;
emitSignals |= EmitPositionUpdated;
} else if (track->state.waitingForData && !waitingForDataOld) {
+ LOG(("WAITING FOR DATA FOR: %1.").arg(track->state.id.externalPlayId()));
if (fullPosition > track->state.position) {
track->state.position = fullPosition;
}
@@ -1621,7 +1639,6 @@ void DetachFromDevice(not_null<Audio::Instance*> instance) {
} // namespace internal
} // namespace Player
- } // namespace Media
class FFMpegAttributesReader : public AbstractFFMpegLoader {
public:
@@ -1675,7 +1692,7 @@ public:
void trySet(QString &to, AVDictionary *dict, const char *key) {
if (!to.isEmpty()) return;
- if (AVDictionaryEntry* tag = av_dict_get(dict, key, 0, 0)) {
+ if (AVDictionaryEntry* tag = av_dict_get(dict, key, nullptr, 0)) {
to = QString::fromUtf8(tag->value);
}
}
@@ -1731,7 +1748,6 @@ private:
};
- namespace Media {
namespace Player {
FileMediaInformation::Song PrepareForSending(const QString &fname, const QByteArray &data) {
@@ -1748,7 +1764,6 @@ FileMediaInformation::Song PrepareForSending(const QString &fname, const QByteAr
}
} // namespace Player
- } // namespace Media
class FFMpegWaveformCounter : public FFMpegLoader {
public:
@@ -1834,8 +1849,12 @@ private:
};
- VoiceWaveform audioCountWaveform(const FileLocation &file, const QByteArray &data) {
- FFMpegWaveformCounter counter(file, data);
+ } // namespace Media
+ VoiceWaveform audioCountWaveform(
+ const FileLocation &file,
+ const QByteArray &data) {
+ Media::FFMpegWaveformCounter counter(file, data);
const auto positionMs = crl::time(0);
if (counter.open(positionMs)) {
return counter.waveform();

View File

@@ -10,14 +10,18 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "storage/localimageloader.h"
#include "base/bytes.h"
- struct VideoSoundData;
- struct VideoSoundPart;
+ namespace Media {
+ struct ExternalSoundData;
+ struct ExternalSoundPart;
+ } // namespace Media
namespace Media {
namespace Streaming {
struct TimePoint;
} // namespace Streaming
+ } // namespace Media
+ namespace Media {
namespace Audio {
class Instance;
@@ -126,7 +130,7 @@ public:
void play(const AudioMsgId &audio, crl::time positionMs = 0);
void play(
const AudioMsgId &audio,
- std::unique_ptr<VideoSoundData> videoData,
+ std::unique_ptr<ExternalSoundData> externalData,
crl::time positionMs = 0);
void pause(const AudioMsgId &audio, bool fast = false);
void resume(const AudioMsgId &audio, bool fast = false);
@@ -134,13 +138,13 @@ public:
void stop(const AudioMsgId &audio);
void stop(const AudioMsgId &audio, State state);
- // Video player audio stream interface.
- void feedFromVideo(const VideoSoundPart &part);
- void forceToBufferVideo(const AudioMsgId &audioId);
- void setSpeedFromVideo(const AudioMsgId &audioId, float64 speed);
- Streaming::TimePoint getVideoSyncTimePoint(
+ // External player audio stream interface.
+ void feedFromExternal(ExternalSoundPart &&part);
+ void forceToBufferExternal(const AudioMsgId &audioId);
+ void setSpeedFromExternal(const AudioMsgId &audioId, float64 speed);
+ Streaming::TimePoint getExternalSyncTimePoint(
const AudioMsgId &audio) const;
- crl::time getVideoCorrectedTime(
+ crl::time getExternalCorrectedTime(
const AudioMsgId &id,
crl::time frameMs,
crl::time systemMs);
@@ -194,7 +198,7 @@ private:
void resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered = -1);
bool checkCurrentALError(AudioMsgId::Type type);
- void videoSoundProgress(const AudioMsgId &audio);
+ void externalSoundProgress(const AudioMsgId &audio);
class Track {
public:
@@ -212,7 +216,7 @@ private:
int getNotQueuedBufferIndex();
- void setVideoData(std::unique_ptr<VideoSoundData> data);
+ void setExternalData(std::unique_ptr<ExternalSoundData> data);
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void changeSpeedEffect(float64 speed);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
@@ -239,7 +243,7 @@ private:
uint32 buffers[kBuffersCount] = { 0 };
};
Stream stream;
- std::unique_ptr<VideoSoundData> videoData;
+ std::unique_ptr<ExternalSoundData> externalData;
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
struct SpeedEffect {

View File

@@ -9,9 +9,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/bytes.h"
+ namespace Media {
uint64_t AbstractFFMpegLoader::ComputeChannelLayout(
uint64_t channel_layout,
int channels) {
if (channel_layout) {
if (av_get_channel_layout_nb_channels(channel_layout) == channels) {
return channel_layout;
@@ -32,13 +34,13 @@ bool AbstractFFMpegLoader::open(crl::time positionMs) {
int res = 0;
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
- ioBuffer = (uchar*)av_malloc(AVBlockSize);
+ ioBuffer = (uchar *)av_malloc(AVBlockSize);
if (!_data.isEmpty()) {
- ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void*>(this), &AbstractFFMpegLoader::_read_data, 0, &AbstractFFMpegLoader::_seek_data);
+ ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void *>(this), &AbstractFFMpegLoader::_read_data, 0, &AbstractFFMpegLoader::_seek_data);
} else if (!_bytes.empty()) {
- ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void*>(this), &AbstractFFMpegLoader::_read_bytes, 0, &AbstractFFMpegLoader::_seek_bytes);
+ ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void *>(this), &AbstractFFMpegLoader::_read_bytes, 0, &AbstractFFMpegLoader::_seek_bytes);
} else {
- ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void*>(this), &AbstractFFMpegLoader::_read_file, 0, &AbstractFFMpegLoader::_seek_file);
+ ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void *>(this), &AbstractFFMpegLoader::_read_file, 0, &AbstractFFMpegLoader::_seek_file);
}
fmtContext = avformat_alloc_context();
if (!fmtContext) {
@@ -97,7 +99,7 @@ AbstractFFMpegLoader::~AbstractFFMpegLoader() {
}
int AbstractFFMpegLoader::_read_data(void *opaque, uint8_t *buf, int buf_size) {
- auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
+ auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
auto nbytes = qMin(l->_data.size() - l->_dataPos, int32(buf_size));
if (nbytes <= 0) {
@@ -110,7 +112,7 @@ int AbstractFFMpegLoader::_read_data(void *opaque, uint8_t *buf, int buf_size) {
}
int64_t AbstractFFMpegLoader::_seek_data(void *opaque, int64_t offset, int whence) {
- auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
+ auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
int32 newPos = -1;
switch (whence) {
@@ -130,7 +132,7 @@ int64_t AbstractFFMpegLoader::_seek_data(void *opaque, int64_t offset, int whenc
}
int AbstractFFMpegLoader::_read_bytes(void *opaque, uint8_t *buf, int buf_size) {
- auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
+ auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
auto nbytes = qMin(static_cast<int>(l->_bytes.size()) - l->_dataPos, buf_size);
if (nbytes <= 0) {
@@ -143,14 +145,15 @@ int AbstractFFMpegLoader::_read_bytes(void *opaque, uint8_t *buf, int buf_size)
}
int64_t AbstractFFMpegLoader::_seek_bytes(void *opaque, int64_t offset, int whence) {
- auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
+ auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
int32 newPos = -1;
switch (whence) {
case SEEK_SET: newPos = offset; break;
case SEEK_CUR: newPos = l->_dataPos + offset; break;
case SEEK_END: newPos = static_cast<int>(l->_bytes.size()) + offset; break;
- case AVSEEK_SIZE: {
+ case AVSEEK_SIZE:
+ {
// Special whence for determining filesize without any seek.
return l->_bytes.size();
} break;
@@ -163,18 +166,19 @@ int64_t AbstractFFMpegLoader::_seek_bytes(void *opaque, int64_t offset, int when
}
int AbstractFFMpegLoader::_read_file(void *opaque, uint8_t *buf, int buf_size) {
- auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
- return int(l->_f.read((char*)(buf), buf_size));
+ auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
+ return int(l->_f.read((char *)(buf), buf_size));
}
int64_t AbstractFFMpegLoader::_seek_file(void *opaque, int64_t offset, int whence) {
- auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
+ auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
switch (whence) {
case SEEK_SET: return l->_f.seek(offset) ? l->_f.pos() : -1;
case SEEK_CUR: return l->_f.seek(l->_f.pos() + offset) ? l->_f.pos() : -1;
case SEEK_END: return l->_f.seek(l->_f.size() + offset) ? l->_f.pos() : -1;
- case AVSEEK_SIZE: {
+ case AVSEEK_SIZE:
+ {
// Special whence for determining filesize without any seek.
return l->_f.size(); return l->_f.size();
} break; } break;
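AVSEEK_SIZE is the special whence value FFmpeg passes to a custom seek callback when it only wants the total stream size, without moving the read position. A standalone sketch of such a read/seek pair over an in-memory buffer; the Buffer type and function names are made up for illustration, only the AVSEEK_SIZE handling mirrors the callbacks above.

// Sketch: custom AVIO callbacks over an in-memory buffer (illustrative names).
extern "C" {
#include <libavformat/avio.h>
#include <libavutil/error.h>
}
#include <algorithm>
#include <cstdint>
#include <cstring>

struct Buffer {
	const uint8_t *data = nullptr;
	int64_t size = 0;
	int64_t pos = 0;
};

static int ReadBuffer(void *opaque, uint8_t *buf, int buf_size) {
	const auto b = static_cast<Buffer*>(opaque);
	const auto available = int(std::min<int64_t>(buf_size, b->size - b->pos));
	if (available <= 0) {
		return AVERROR_EOF;
	}
	memcpy(buf, b->data + b->pos, available);
	b->pos += available;
	return available;
}

static int64_t SeekBuffer(void *opaque, int64_t offset, int whence) {
	const auto b = static_cast<Buffer*>(opaque);
	switch (whence) {
	case SEEK_SET: b->pos = offset; break;
	case SEEK_CUR: b->pos += offset; break;
	case SEEK_END: b->pos = b->size + offset; break;
	case AVSEEK_SIZE: return b->size; // size query only, no seek happens
	default: return -1;
	}
	return b->pos;
}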
@ -186,14 +190,14 @@ AbstractAudioFFMpegLoader::AbstractAudioFFMpegLoader(
const FileLocation &file, const FileLocation &file,
const QByteArray &data, const QByteArray &data,
bytes::vector &&buffer) bytes::vector &&buffer)
: AbstractFFMpegLoader(file, data, std::move(buffer)) { : AbstractFFMpegLoader(file, data, std::move(buffer))
_frame = av_frame_alloc(); , _frame(Streaming::MakeFramePointer()) {
} }
bool AbstractAudioFFMpegLoader::initUsingContext( bool AbstractAudioFFMpegLoader::initUsingContext(
not_null<AVCodecContext*> context, not_null<AVCodecContext *> context,
int64 initialCount, int64 initialCount,
int initialFrequency) { int initialFrequency) {
const auto layout = ComputeChannelLayout( const auto layout = ComputeChannelLayout(
context->channel_layout, context->channel_layout,
context->channels); context->channels);
@ -260,22 +264,20 @@ bool AbstractAudioFFMpegLoader::initUsingContext(
} }
auto AbstractAudioFFMpegLoader::replaceFrameAndRead( auto AbstractAudioFFMpegLoader::replaceFrameAndRead(
not_null<AVFrame*> frame, Streaming::FramePointer frame,
QByteArray &result, QByteArray &result,
int64 &samplesAdded) int64 &samplesAdded)
-> ReadResult { -> ReadResult {
av_frame_free(&_frame); _frame = std::move(frame);
_frame = frame;
return readFromReadyFrame(result, samplesAdded); return readFromReadyFrame(result, samplesAdded);
} }
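replaceFrameAndRead() lets a derived loader feed in a frame that was decoded somewhere else instead of pulling one from the codec. A sketch of how that is likely used from ChildFFMpegLoader::readFromInitialFrame() further below; the member names are assumptions, not verbatim source.

// Hedged sketch: hand an externally decoded first frame to the loader,
// then fall back to the shared codec context for everything after it.
AudioPlayerLoader::ReadResult ChildFFMpegLoader::readFromInitialFrame(
		QByteArray &result,
		int64 &samplesAdded) {
	if (!_parentData->frame) {
		return ReadResult::Wait; // nothing pre-decoded, use the codec instead
	}
	return replaceFrameAndRead(
		base::take(_parentData->frame),
		result,
		samplesAdded);
}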
auto AbstractAudioFFMpegLoader::readFromReadyContext( auto AbstractAudioFFMpegLoader::readFromReadyContext(
not_null<AVCodecContext*> context, not_null<AVCodecContext *> context,
QByteArray &result, QByteArray &result,
int64 &samplesAdded) int64 &samplesAdded)
-> ReadResult { -> ReadResult {
av_frame_unref(_frame); const auto res = avcodec_receive_frame(context, _frame.get());
const auto res = avcodec_receive_frame(context, _frame);
if (res >= 0) { if (res >= 0) {
return readFromReadyFrame(result, samplesAdded); return readFromReadyFrame(result, samplesAdded);
} }
@ -427,19 +429,19 @@ bool AbstractAudioFFMpegLoader::ensureResampleSpaceAvailable(int samples) {
} }
void AbstractAudioFFMpegLoader::appendSamples( void AbstractAudioFFMpegLoader::appendSamples(
QByteArray &result, QByteArray &result,
int64 &samplesAdded, int64 &samplesAdded,
uint8_t **data, uint8_t **data,
int count) const { int count) const {
result.append( result.append(
reinterpret_cast<const char*>(data[0]), reinterpret_cast<const char *>(data[0]),
count * _outputSampleSize); count * _outputSampleSize);
samplesAdded += count; samplesAdded += count;
} }
AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame( AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
QByteArray &result, QByteArray &result,
int64 &samplesAdded) { int64 &samplesAdded) {
if (frameHasDesiredFormat()) { if (frameHasDesiredFormat()) {
appendSamples( appendSamples(
result, result,
@ -463,7 +465,7 @@ AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
_swrContext, _swrContext,
_swrDstData, _swrDstData,
maxSamples, maxSamples,
(const uint8_t**)_frame->extended_data, (const uint8_t**)_frame->extended_data,
_frame->nb_samples); _frame->nb_samples);
if (samples < 0) { if (samples < 0) {
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 }; char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
@ -496,13 +498,12 @@ AbstractAudioFFMpegLoader::~AbstractAudioFFMpegLoader() {
} }
av_freep(&_swrDstData); av_freep(&_swrDstData);
} }
av_frame_free(&_frame);
} }
FFMpegLoader::FFMpegLoader( FFMpegLoader::FFMpegLoader(
const FileLocation &file, const FileLocation &file,
const QByteArray &data, const QByteArray &data,
bytes::vector &&buffer) bytes::vector &&buffer)
: AbstractAudioFFMpegLoader(file, data, std::move(buffer)) { : AbstractAudioFFMpegLoader(file, data, std::move(buffer)) {
} }
@ -582,8 +583,8 @@ bool FFMpegLoader::seekTo(crl::time positionMs) {
} }
AudioPlayerLoader::ReadResult FFMpegLoader::readMore( AudioPlayerLoader::ReadResult FFMpegLoader::readMore(
QByteArray &result, QByteArray &result,
int64 &samplesAdded) { int64 &samplesAdded) {
const auto readResult = readFromReadyContext( const auto readResult = readFromReadyContext(
_codecContext, _codecContext,
result, result,
@ -641,3 +642,5 @@ FFMpegLoader::~FFMpegLoader() {
avcodec_free_context(&_codecContext); avcodec_free_context(&_codecContext);
} }
} }
} // namespace Media

View File

@ -9,6 +9,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio.h" #include "media/audio/media_audio.h"
#include "media/audio/media_audio_loader.h" #include "media/audio/media_audio_loader.h"
#include "media/streaming/media_streaming_utility.h"
extern "C" { extern "C" {
#include <libavcodec/avcodec.h> #include <libavcodec/avcodec.h>
@ -19,6 +20,8 @@ extern "C" {
#include <AL/al.h> #include <AL/al.h>
namespace Media {
class AbstractFFMpegLoader : public AudioPlayerLoader { class AbstractFFMpegLoader : public AudioPlayerLoader {
public: public:
AbstractFFMpegLoader( AbstractFFMpegLoader(
@ -91,18 +94,18 @@ public:
protected: protected:
bool initUsingContext( bool initUsingContext(
not_null<AVCodecContext*> context, not_null<AVCodecContext *> context,
int64 initialCount, int64 initialCount,
int initialFrequency); int initialFrequency);
ReadResult readFromReadyContext( ReadResult readFromReadyContext(
not_null<AVCodecContext*> context, not_null<AVCodecContext *> context,
QByteArray &result, QByteArray &result,
int64 &samplesAdded); int64 &samplesAdded);
// Streaming player provides the first frame to the ChildFFMpegLoader // Streaming player provides the first frame to the ChildFFMpegLoader
// so we replace our allocated frame with the one provided. // so we replace our allocated frame with the one provided.
ReadResult replaceFrameAndRead( ReadResult replaceFrameAndRead(
not_null<AVFrame*> frame, Streaming::FramePointer frame,
QByteArray &result, QByteArray &result,
int64 &samplesAdded); int64 &samplesAdded);
@ -123,7 +126,7 @@ private:
uint8_t **data, uint8_t **data,
int count) const; int count) const;
AVFrame *_frame = nullptr; Streaming::FramePointer _frame;
int _outputFormat = AL_FORMAT_STEREO16; int _outputFormat = AL_FORMAT_STEREO16;
int _outputChannels = 2; int _outputChannels = 2;
int _outputSampleSize = 2 * sizeof(uint16); int _outputSampleSize = 2 * sizeof(uint16);
@ -164,3 +167,5 @@ private:
AVPacket _packet; AVPacket _packet;
}; };
} // namespace Media

View File

@ -7,7 +7,12 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/ */
#include "media/audio/media_audio_loader.h" #include "media/audio/media_audio_loader.h"
AudioPlayerLoader::AudioPlayerLoader(const FileLocation &file, const QByteArray &data, bytes::vector &&buffer) namespace Media {
AudioPlayerLoader::AudioPlayerLoader(
const FileLocation &file,
const QByteArray &data,
bytes::vector &&buffer)
: _file(file) : _file(file)
, _data(data) , _data(data)
, _bytes(std::move(buffer)) { , _bytes(std::move(buffer)) {
@ -20,23 +25,31 @@ AudioPlayerLoader::~AudioPlayerLoader() {
} }
} }
bool AudioPlayerLoader::check(const FileLocation &file, const QByteArray &data) { bool AudioPlayerLoader::check(
return this->_file == file && this->_data.size() == data.size(); const FileLocation &file,
const QByteArray &data) {
return (this->_file == file) && (this->_data.size() == data.size());
} }
void AudioPlayerLoader::saveDecodedSamples(QByteArray *samples, int64 *samplesCount) { void AudioPlayerLoader::saveDecodedSamples(
Assert(_savedSamplesCount == 0); not_null<QByteArray*> samples,
Assert(_savedSamples.isEmpty()); not_null<int64*> samplesCount) {
Assert(!_holdsSavedSamples); Expects(_savedSamplesCount == 0);
Expects(_savedSamples.isEmpty());
Expects(!_holdsSavedSamples);
samples->swap(_savedSamples); samples->swap(_savedSamples);
std::swap(*samplesCount, _savedSamplesCount); std::swap(*samplesCount, _savedSamplesCount);
_holdsSavedSamples = true; _holdsSavedSamples = true;
} }
void AudioPlayerLoader::takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount) { void AudioPlayerLoader::takeSavedDecodedSamples(
Assert(*samplesCount == 0); not_null<QByteArray*> samples,
Assert(samples->isEmpty()); not_null<int64*> samplesCount) {
Assert(_holdsSavedSamples); Expects(*samplesCount == 0);
Expects(samples->isEmpty());
Expects(_holdsSavedSamples);
samples->swap(_savedSamples); samples->swap(_savedSamples);
std::swap(*samplesCount, _savedSamplesCount); std::swap(*samplesCount, _savedSamplesCount);
_holdsSavedSamples = false; _holdsSavedSamples = false;
@ -51,17 +64,29 @@ bool AudioPlayerLoader::openFile() {
if (_f.isOpen()) _f.close(); if (_f.isOpen()) _f.close();
if (!_access) { if (!_access) {
if (!_file.accessEnable()) { if (!_file.accessEnable()) {
LOG(("Audio Error: could not open file access '%1', data size '%2', error %3, %4").arg(_file.name()).arg(_data.size()).arg(_f.error()).arg(_f.errorString())); LOG(("Audio Error: could not open file access '%1', "
"data size '%2', error %3, %4"
).arg(_file.name()
).arg(_data.size()
).arg(_f.error()
).arg(_f.errorString()));
return false; return false;
} }
_access = true; _access = true;
} }
_f.setFileName(_file.name()); _f.setFileName(_file.name());
if (!_f.open(QIODevice::ReadOnly)) { if (!_f.open(QIODevice::ReadOnly)) {
LOG(("Audio Error: could not open file '%1', data size '%2', error %3, %4").arg(_file.name()).arg(_data.size()).arg(_f.error()).arg(_f.errorString())); LOG(("Audio Error: could not open file '%1', "
"data size '%2', error %3, %4"
).arg(_file.name()
).arg(_data.size()
).arg(_f.error()
).arg(_f.errorString()));
return false; return false;
} }
} }
_dataPos = 0; _dataPos = 0;
return true; return true;
} }
} // namespace Media

View File

@ -8,10 +8,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#pragma once #pragma once
#include "base/bytes.h" #include "base/bytes.h"
#include "media/streaming/media_streaming_utility.h"
namespace FFMpeg { namespace Media {
struct AVPacketDataWrap;
} // namespace FFMpeg
class AudioPlayerLoader { class AudioPlayerLoader {
public: public:
@ -35,9 +34,10 @@ public:
Wait, Wait,
EndOfFile, EndOfFile,
}; };
virtual ReadResult readMore(QByteArray &samples, int64 &samplesCount) = 0; virtual ReadResult readMore(
virtual void enqueuePackets( QByteArray &samples,
QQueue<FFMpeg::AVPacketDataWrap> &&packets) { int64 &samplesCount) = 0;
virtual void enqueuePackets(std::deque<Streaming::Packet> &&packets) {
Unexpected("enqueuePackets() call on not ChildFFMpegLoader."); Unexpected("enqueuePackets() call on not ChildFFMpegLoader.");
} }
virtual void setForceToBuffer(bool force) { virtual void setForceToBuffer(bool force) {
@ -47,8 +47,12 @@ public:
return false; return false;
} }
void saveDecodedSamples(QByteArray *samples, int64 *samplesCount); void saveDecodedSamples(
void takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount); not_null<QByteArray*> samples,
not_null<int64*> samplesCount);
void takeSavedDecodedSamples(
not_null<QByteArray*> samples,
not_null<int64*> samplesCount);
bool holdsSavedDecodedSamples() const; bool holdsSavedDecodedSamples() const;
protected: protected:
@ -68,3 +72,5 @@ private:
bool _holdsSavedSamples = false; bool _holdsSavedSamples = false;
}; };
} // namespace Media

View File

@ -20,46 +20,46 @@ constexpr auto kPlaybackBufferSize = 256 * 1024;
} // namespace } // namespace
Loaders::Loaders(QThread *thread) Loaders::Loaders(QThread *thread)
: _fromVideoNotify([=] { videoSoundAdded(); }) { : _fromExternalNotify([=] { videoSoundAdded(); }) {
moveToThread(thread); moveToThread(thread);
_fromVideoNotify.moveToThread(thread); _fromExternalNotify.moveToThread(thread);
connect(thread, SIGNAL(started()), this, SLOT(onInit())); connect(thread, SIGNAL(started()), this, SLOT(onInit()));
connect(thread, SIGNAL(finished()), this, SLOT(deleteLater())); connect(thread, SIGNAL(finished()), this, SLOT(deleteLater()));
} }
void Loaders::feedFromVideo(const VideoSoundPart &part) { void Loaders::feedFromExternal(ExternalSoundPart &&part) {
auto invoke = false; auto invoke = false;
{ {
QMutexLocker lock(&_fromVideoMutex); QMutexLocker lock(&_fromExternalMutex);
invoke = _fromVideoQueues.empty() invoke = _fromExternalQueues.empty()
&& _fromVideoForceToBuffer.empty(); && _fromExternalForceToBuffer.empty();
_fromVideoQueues[part.audio].enqueue(FFMpeg::dataWrapFromPacket(*part.packet)); _fromExternalQueues[part.audio].push_back(std::move(part.packet));
} }
if (invoke) { if (invoke) {
_fromVideoNotify.call(); _fromExternalNotify.call();
} }
} }
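The pattern here is worth calling out: packets are pushed under a mutex, and the cross-thread notification fires only when the queues were previously empty, so an arbitrary burst of feedFromExternal() calls wakes the loaders thread once. A stripped-down sketch of the same idea using standard types only (not the actual Loaders class):

// Generic "notify once per batch" sketch: push under a mutex, wake the
// worker only when the queue goes from empty to non-empty, drain in bulk.
#include <deque>
#include <functional>
#include <mutex>
#include <utility>

template <typename Item>
class BatchQueue {
public:
	explicit BatchQueue(std::function<void()> notify)
	: _notify(std::move(notify)) {
	}

	void push(Item &&item) {
		auto wasEmpty = false;
		{
			std::lock_guard<std::mutex> lock(_mutex);
			wasEmpty = _items.empty();
			_items.push_back(std::move(item));
		}
		if (wasEmpty) {
			_notify(); // one queued invocation drains the whole batch
		}
	}

	std::deque<Item> takeAll() {
		std::lock_guard<std::mutex> lock(_mutex);
		auto result = std::deque<Item>();
		result.swap(_items);
		return result;
	}

private:
	std::mutex _mutex;
	std::deque<Item> _items;
	std::function<void()> _notify;
};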
void Loaders::forceToBufferVideo(const AudioMsgId &audioId) { void Loaders::forceToBufferExternal(const AudioMsgId &audioId) {
auto invoke = false; auto invoke = false;
{ {
QMutexLocker lock(&_fromVideoMutex); QMutexLocker lock(&_fromExternalMutex);
invoke = _fromVideoQueues.empty() invoke = _fromExternalQueues.empty()
&& _fromVideoForceToBuffer.empty(); && _fromExternalForceToBuffer.empty();
_fromVideoForceToBuffer.emplace(audioId); _fromExternalForceToBuffer.emplace(audioId);
} }
if (invoke) { if (invoke) {
_fromVideoNotify.call(); _fromExternalNotify.call();
} }
} }
void Loaders::videoSoundAdded() { void Loaders::videoSoundAdded() {
auto queues = decltype(_fromVideoQueues)(); auto queues = decltype(_fromExternalQueues)();
auto forces = decltype(_fromVideoForceToBuffer)(); auto forces = decltype(_fromExternalForceToBuffer)();
{ {
QMutexLocker lock(&_fromVideoMutex); QMutexLocker lock(&_fromExternalMutex);
queues = base::take(_fromVideoQueues); queues = base::take(_fromExternalQueues);
forces = base::take(_fromVideoForceToBuffer); forces = base::take(_fromExternalForceToBuffer);
} }
for (const auto &audioId : forces) { for (const auto &audioId : forces) {
const auto tryLoader = [&](const auto &id, auto &loader) { const auto tryLoader = [&](const auto &id, auto &loader) {
@ -90,34 +90,9 @@ void Loaders::videoSoundAdded() {
} }
return false; return false;
}; };
const auto used = tryLoader(_audio, _audioLoader) tryLoader(_audio, _audioLoader)
|| tryLoader(_song, _songLoader) || tryLoader(_song, _songLoader)
|| tryLoader(_video, _videoLoader); || tryLoader(_video, _videoLoader);
if (!used) {
for (auto &packetData : packets) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}
}
}
Loaders::~Loaders() {
QMutexLocker lock(&_fromVideoMutex);
clearFromVideoQueue();
}
void Loaders::clearFromVideoQueue() {
auto queues = base::take(_fromVideoQueues);
for (auto &pair : queues) {
const auto audioId = pair.first;
auto &packets = pair.second;
for (auto &packetData : packets) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
} }
} }
@ -144,7 +119,13 @@ AudioMsgId Loaders::clear(AudioMsgId::Type type) {
AudioMsgId result; AudioMsgId result;
switch (type) { switch (type) {
case AudioMsgId::Type::Voice: std::swap(result, _audio); _audioLoader = nullptr; break; case AudioMsgId::Type::Voice: std::swap(result, _audio); _audioLoader = nullptr; break;
case AudioMsgId::Type::Song: std::swap(result, _song); _songLoader = nullptr; break; case AudioMsgId::Type::Song:
if (_songLoader) {
LOG(("SONG LOADER KILLED FOR: %1.").arg(_song.externalPlayId()));
}
std::swap(result, _song);
_songLoader = nullptr;
break;
case AudioMsgId::Type::Video: std::swap(result, _video); _videoLoader = nullptr; break; case AudioMsgId::Type::Video: std::swap(result, _video); _videoLoader = nullptr; break;
} }
return result; return result;
@ -324,6 +305,7 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
return; return;
} }
} }
LOG(("alSourcePlay: loader for: %1").arg(track->state.id.externalPlayId()));
alSourcePlay(track->stream.source); alSourcePlay(track->stream.source);
if (!internal::audioCheckError()) { if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError); setStoppedState(track, State::StoppedAtError);
@ -371,17 +353,21 @@ AudioPlayerLoader *Loaders::setupLoader(
case AudioMsgId::Type::Video: _video = audio; loader = &_videoLoader; break; case AudioMsgId::Type::Video: _video = audio; loader = &_videoLoader; break;
} }
if (audio.playId()) { if (audio.externalPlayId()) {
if (!track->videoData) { if (!track->externalData) {
clear(audio.type()); clear(audio.type());
track->state.state = State::StoppedAtError; track->state.state = State::StoppedAtError;
emit error(audio); emit error(audio);
LOG(("Audio Error: video sound data not ready")); LOG(("Audio Error: video sound data not ready"));
return nullptr; return nullptr;
} }
*loader = std::make_unique<ChildFFMpegLoader>(std::move(track->videoData)); *loader = std::make_unique<ChildFFMpegLoader>(
std::move(track->externalData));
} else { } else {
*loader = std::make_unique<FFMpegLoader>(track->file, track->data, bytes::vector()); *loader = std::make_unique<FFMpegLoader>(
track->file,
track->data,
bytes::vector());
} }
l = loader->get(); l = loader->get();
@ -444,5 +430,7 @@ void Loaders::onCancel(const AudioMsgId &audio) {
} }
} }
Loaders::~Loaders() = default;
} // namespace Player } // namespace Player
} // namespace Media } // namespace Media

View File

@ -21,15 +21,15 @@ class Loaders : public QObject {
public: public:
Loaders(QThread *thread); Loaders(QThread *thread);
void feedFromVideo(const VideoSoundPart &part); void feedFromExternal(ExternalSoundPart &&part);
void forceToBufferVideo(const AudioMsgId &audioId); void forceToBufferExternal(const AudioMsgId &audioId);
~Loaders(); ~Loaders();
signals: signals:
void error(const AudioMsgId &audio); void error(const AudioMsgId &audio);
void needToCheck(); void needToCheck();
public slots: public slots:
void onInit(); void onInit();
void onStart(const AudioMsgId &audio, qint64 positionMs); void onStart(const AudioMsgId &audio, qint64 positionMs);
@ -38,17 +38,18 @@ signals:
private: private:
void videoSoundAdded(); void videoSoundAdded();
void clearFromVideoQueue();
AudioMsgId _audio, _song, _video; AudioMsgId _audio, _song, _video;
std::unique_ptr<AudioPlayerLoader> _audioLoader; std::unique_ptr<AudioPlayerLoader> _audioLoader;
std::unique_ptr<AudioPlayerLoader> _songLoader; std::unique_ptr<AudioPlayerLoader> _songLoader;
std::unique_ptr<AudioPlayerLoader> _videoLoader; std::unique_ptr<AudioPlayerLoader> _videoLoader;
QMutex _fromVideoMutex; QMutex _fromExternalMutex;
base::flat_map<AudioMsgId, QQueue<FFMpeg::AVPacketDataWrap>> _fromVideoQueues; base::flat_map<
base::flat_set<AudioMsgId> _fromVideoForceToBuffer; AudioMsgId,
SingleQueuedInvokation _fromVideoNotify; std::deque<Streaming::Packet>> _fromExternalQueues;
base::flat_set<AudioMsgId> _fromExternalForceToBuffer;
SingleQueuedInvokation _fromExternalNotify;
void emitError(AudioMsgId::Type type); void emitError(AudioMsgId::Type type);
AudioMsgId clear(AudioMsgId::Type type); AudioMsgId clear(AudioMsgId::Type type);

View File

@ -9,6 +9,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/crash_reports.h" #include "core/crash_reports.h"
namespace Media {
namespace { namespace {
constexpr AVSampleFormat AudioToFormat = AV_SAMPLE_FMT_S16; constexpr AVSampleFormat AudioToFormat = AV_SAMPLE_FMT_S16;
@ -26,27 +27,19 @@ bool IsPlanarFormat(int format) {
} // namespace } // namespace
VideoSoundData::~VideoSoundData() { ChildFFMpegLoader::ChildFFMpegLoader(
if (frame) { std::unique_ptr<ExternalSoundData> &&data)
av_frame_free(&frame);
}
if (context) {
avcodec_close(context);
avcodec_free_context(&context);
}
}
ChildFFMpegLoader::ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data)
: AbstractAudioFFMpegLoader( : AbstractAudioFFMpegLoader(
FileLocation(), FileLocation(),
QByteArray(), QByteArray(),
bytes::vector()) bytes::vector())
, _parentData(std::move(data)) { , _parentData(std::move(data)) {
Expects(_parentData->codec != nullptr);
} }
bool ChildFFMpegLoader::open(crl::time positionMs) { bool ChildFFMpegLoader::open(crl::time positionMs) {
return initUsingContext( return initUsingContext(
_parentData->context, _parentData->codec.get(),
_parentData->length, _parentData->length,
_parentData->frequency); _parentData->frequency);
} }
@ -64,8 +57,8 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readFromInitialFrame(
} }
AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore( AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
QByteArray &result, QByteArray &result,
int64 &samplesAdded) { int64 &samplesAdded) {
const auto initialFrameResult = readFromInitialFrame( const auto initialFrameResult = readFromInitialFrame(
result, result,
samplesAdded); samplesAdded);
@ -74,32 +67,37 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
} }
const auto readResult = readFromReadyContext( const auto readResult = readFromReadyContext(
_parentData->context, _parentData->codec.get(),
result, result,
samplesAdded); samplesAdded);
if (readResult != ReadResult::Wait) { if (readResult != ReadResult::Wait) {
return readResult; return readResult;
} }
if (_queue.isEmpty()) { if (_queue.empty()) {
return _eofReached ? ReadResult::EndOfFile : ReadResult::Wait; return _eofReached ? ReadResult::EndOfFile : ReadResult::Wait;
} }
AVPacket packet; auto packet = std::move(_queue.front());
FFMpeg::packetFromDataWrap(packet, _queue.dequeue()); _queue.pop_front();
_eofReached = FFMpeg::isNullPacket(packet); _eofReached = packet.empty();
if (_eofReached) { if (_eofReached) {
avcodec_send_packet(_parentData->context, nullptr); // drain avcodec_send_packet(_parentData->codec.get(), nullptr); // drain
return ReadResult::Ok; return ReadResult::Ok;
} }
auto res = avcodec_send_packet(_parentData->context, &packet); auto res = avcodec_send_packet(
_parentData->codec.get(),
&packet.fields());
if (res < 0) { if (res < 0) {
FFMpeg::freePacket(&packet);
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 }; char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
LOG(("Audio Error: Unable to avcodec_send_packet() file '%1', data size '%2', error %3, %4").arg(_file.name()).arg(_data.size()).arg(res).arg(av_make_error_string(err, sizeof(err), res))); LOG(("Audio Error: Unable to avcodec_send_packet() file '%1', "
"data size '%2', error %3, %4"
).arg(_file.name()
).arg(_data.size()
).arg(res
).arg(av_make_error_string(err, sizeof(err), res)));
// There is a sample voice message where skipping such packet // There is a sample voice message where skipping such packet
// results in a crash (read_access to nullptr) in swr_convert(). // results in a crash (read_access to nullptr) in swr_convert().
if (res == AVERROR_INVALIDDATA) { if (res == AVERROR_INVALIDDATA) {
@ -107,16 +105,18 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
} }
return ReadResult::Error; return ReadResult::Error;
} }
FFMpeg::freePacket(&packet);
return ReadResult::Ok; return ReadResult::Ok;
} }
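readMore() follows FFmpeg's send/receive decoding model: drain decoded frames with avcodec_receive_frame() first, and feed the next queued packet with avcodec_send_packet() only once the decoder reports EAGAIN (a nullptr packet starts draining at end of stream). A generic, self-contained sketch of that loop, separate from the loader classes here; nextPacket is a placeholder:

// Generic send/receive decode loop sketch (not the loader code itself).
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <functional>

bool decodeAll(
		AVCodecContext *ctx,
		AVFrame *frame,
		const std::function<AVPacket*()> &nextPacket) {
	while (true) {
		const int received = avcodec_receive_frame(ctx, frame);
		if (received >= 0) {
			// ... consume one decoded frame, then ask for the next ...
			continue;
		} else if (received == AVERROR_EOF) {
			return true; // decoder fully drained
		} else if (received != AVERROR(EAGAIN)) {
			return false; // real decoding error
		}
		// EAGAIN: the decoder wants more input; nullptr switches it to drain mode.
		const int sent = avcodec_send_packet(ctx, nextPacket());
		if (sent < 0 && sent != AVERROR(EAGAIN)) {
			return false;
		}
	}
}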
void ChildFFMpegLoader::enqueuePackets( void ChildFFMpegLoader::enqueuePackets(
QQueue<FFMpeg::AVPacketDataWrap> &&packets) { std::deque<Streaming::Packet> &&packets) {
if (_queue.empty()) { if (_queue.empty()) {
_queue = std::move(packets); _queue = std::move(packets);
} else { } else {
_queue += std::move(packets); _queue.insert(
end(_queue),
std::make_move_iterator(packets.begin()),
std::make_move_iterator(packets.end()));
} }
packets.clear(); packets.clear();
} }
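Appending one std::deque to another through std::make_move_iterator moves every Packet instead of copying it, which matters for a wrapper that owns its AVPacket buffer. A tiny standalone illustration of the idiom:

// Standalone illustration of the move-append idiom used above.
#include <deque>
#include <iterator>
#include <memory>

int main() {
	std::deque<std::unique_ptr<int>> queue, incoming;
	incoming.push_back(std::make_unique<int>(1));
	incoming.push_back(std::make_unique<int>(2));
	queue.insert(
		end(queue),
		std::make_move_iterator(incoming.begin()),
		std::make_move_iterator(incoming.end()));
	// incoming now holds moved-from (null) pointers; clear it explicitly.
	incoming.clear();
}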
@ -129,10 +129,6 @@ bool ChildFFMpegLoader::forceToBuffer() const {
return _forceToBuffer; return _forceToBuffer;
} }
ChildFFMpegLoader::~ChildFFMpegLoader() { ChildFFMpegLoader::~ChildFFMpegLoader() = default;
for (auto &packetData : base::take(_queue)) {
AVPacket packet; } // namespace Media
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}

View File

@ -8,60 +8,26 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#pragma once #pragma once
#include "media/audio/media_audio_ffmpeg_loader.h" #include "media/audio/media_audio_ffmpeg_loader.h"
#include "media/streaming/media_streaming_utility.h"
struct VideoSoundData { namespace Media {
AVCodecContext *context = nullptr;
AVFrame *frame = nullptr; struct ExternalSoundData {
Streaming::CodecPointer codec;
Streaming::FramePointer frame;
int32 frequency = Media::Player::kDefaultFrequency; int32 frequency = Media::Player::kDefaultFrequency;
int64 length = 0; int64 length = 0;
float64 speed = 1.; // 0.5 <= speed <= 2. float64 speed = 1.; // 0.5 <= speed <= 2.
~VideoSoundData();
}; };
struct VideoSoundPart { struct ExternalSoundPart {
const AVPacket *packet = nullptr;
AudioMsgId audio; AudioMsgId audio;
Streaming::Packet packet;
}; };
namespace FFMpeg {
// AVPacket has a deprecated field, so when you copy an AVPacket
// variable (e.g. inside QQueue), a compile warning is emitted.
// We wrap full AVPacket data in a new AVPacketDataWrap struct.
// All other fields are copied from AVPacket without modifications.
struct AVPacketDataWrap {
char __data[sizeof(AVPacket)];
};
inline void packetFromDataWrap(AVPacket &packet, const AVPacketDataWrap &data) {
memcpy(&packet, &data, sizeof(data));
}
inline AVPacketDataWrap dataWrapFromPacket(const AVPacket &packet) {
AVPacketDataWrap data;
memcpy(&data, &packet, sizeof(data));
return data;
}
inline bool isNullPacket(const AVPacket &packet) {
return packet.data == nullptr && packet.size == 0;
}
inline bool isNullPacket(const AVPacket *packet) {
return isNullPacket(*packet);
}
inline void freePacket(AVPacket *packet) {
if (!isNullPacket(packet)) {
av_packet_unref(packet);
}
}
} // namespace FFMpeg
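The memcpy-based AVPacketDataWrap workaround is replaced by Streaming::Packet which, judging from its use in this patch (fields(), empty(), move-only queueing), is an owning, movable AVPacket wrapper. A rough sketch of what such a wrapper could look like; this is an assumption about its shape, not the actual media_streaming_utility.h code:

// Hypothetical sketch of a move-only AVPacket owner (assumed shape only).
extern "C" {
#include <libavcodec/avcodec.h>
}

class Packet {
public:
	Packet() {
		av_init_packet(&_data);
		_data.data = nullptr;
		_data.size = 0;
	}
	explicit Packet(AVPacket *from) : Packet() {
		av_packet_move_ref(&_data, from); // take ownership, leave 'from' blank
	}
	Packet(Packet &&other) : Packet() {
		av_packet_move_ref(&_data, &other._data);
	}
	Packet &operator=(Packet &&other) {
		if (this != &other) {
			av_packet_unref(&_data);
			av_packet_move_ref(&_data, &other._data);
		}
		return *this;
	}
	~Packet() {
		av_packet_unref(&_data);
	}

	AVPacket &fields() { return _data; }
	const AVPacket &fields() const { return _data; }
	bool empty() const { return !_data.data && !_data.size; }

private:
	AVPacket _data;
};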
class ChildFFMpegLoader : public AbstractAudioFFMpegLoader { class ChildFFMpegLoader : public AbstractAudioFFMpegLoader {
public: public:
ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data); ChildFFMpegLoader(std::unique_ptr<ExternalSoundData> &&data);
bool open(crl::time positionMs) override; bool open(crl::time positionMs) override;
@ -70,8 +36,7 @@ public:
} }
ReadResult readMore(QByteArray &result, int64 &samplesAdded) override; ReadResult readMore(QByteArray &result, int64 &samplesAdded) override;
void enqueuePackets( void enqueuePackets(std::deque<Streaming::Packet> &&packets) override;
QQueue<FFMpeg::AVPacketDataWrap> &&packets) override;
void setForceToBuffer(bool force) override; void setForceToBuffer(bool force) override;
bool forceToBuffer() const override; bool forceToBuffer() const override;
@ -89,9 +54,11 @@ private:
QByteArray &result, QByteArray &result,
int64 &samplesAdded); int64 &samplesAdded);
std::unique_ptr<VideoSoundData> _parentData; std::unique_ptr<ExternalSoundData> _parentData;
QQueue<FFMpeg::AVPacketDataWrap> _queue; std::deque<Streaming::Packet> _queue;
bool _forceToBuffer = false; bool _forceToBuffer = false;
bool _eofReached = false; bool _eofReached = false;
}; };
} // namespace Media

View File

@ -44,26 +44,25 @@ bool isAlignedImage(const QImage &image) {
} // namespace } // namespace
FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data, const AudioMsgId &audio) : ReaderImplementation(location, data) FFMpegReaderImplementation::FFMpegReaderImplementation(
FileLocation *location,
QByteArray *data,
const AudioMsgId &audio)
: ReaderImplementation(location, data)
, _frame(Streaming::MakeFramePointer())
, _audioMsgId(audio) { , _audioMsgId(audio) {
_frame = av_frame_alloc();
} }
ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() { ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
if (_frameRead) {
av_frame_unref(_frame);
_frameRead = false;
}
do { do {
int res = avcodec_receive_frame(_codecContext, _frame); int res = avcodec_receive_frame(_codecContext, _frame.get());
if (res >= 0) { if (res >= 0) {
processReadFrame(); processReadFrame();
return ReadResult::Success; return ReadResult::Success;
} }
if (res == AVERROR_EOF) { if (res == AVERROR_EOF) {
clearPacketQueue(); _packetQueue.clear();
if (_mode == Mode::Normal) { if (_mode == Mode::Normal) {
return ReadResult::EndOfFile; return ReadResult::EndOfFile;
} }
@ -96,7 +95,7 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
return ReadResult::Error; return ReadResult::Error;
} }
while (_packetQueue.isEmpty()) { while (_packetQueue.empty()) {
auto packetResult = readAndProcessPacket(); auto packetResult = readAndProcessPacket();
if (packetResult == PacketResult::Error) { if (packetResult == PacketResult::Error) {
return ReadResult::Error; return ReadResult::Error;
@ -104,19 +103,27 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
break; break;
} }
} }
if (_packetQueue.isEmpty()) { if (_packetQueue.empty()) {
avcodec_send_packet(_codecContext, nullptr); // drain avcodec_send_packet(_codecContext, nullptr); // drain
continue; continue;
} }
startPacket(); auto packet = std::move(_packetQueue.front());
_packetQueue.pop_front();
AVPacket packet; const auto native = &packet.fields();
FFMpeg::packetFromDataWrap(packet, _packetQueue.head()); const auto guard = gsl::finally([
res = avcodec_send_packet(_codecContext, &packet); &,
size = native->size,
data = native->data
] {
native->size = size;
native->data = data;
packet = Streaming::Packet();
});
res = avcodec_send_packet(_codecContext, native);
if (res < 0) { if (res < 0) {
finishPacket();
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 }; char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
LOG(("Gif Error: Unable to avcodec_send_packet() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res))); LOG(("Gif Error: Unable to avcodec_send_packet() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
if (res == AVERROR_INVALIDDATA) { if (res == AVERROR_INVALIDDATA) {
@ -126,7 +133,6 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
} }
return ReadResult::Error; return ReadResult::Error;
} }
finishPacket();
} while (true); } while (true);
return ReadResult::Error; return ReadResult::Error;
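The gsl::finally guard replaces the old startPacket()/finishPacket() pair: it remembers the packet's original data/size pointers, restores them on every path out of the scope, and then releases the packet. A minimal illustration of the scope-guard idiom on its own, away from FFmpeg (the GSL header name may differ between versions):

// Scope-guard sketch: cleanup runs on every exit path from the scope.
#include <cstdio>
#include <gsl/gsl_util> // gsl::finally; newer GSL releases use <gsl/util>

void example() {
	std::FILE *f = std::fopen("data.bin", "rb");
	if (!f) {
		return;
	}
	const auto guard = gsl::finally([&] {
		std::fclose(f); // runs whether we return early, normally, or throw
	});
	// ... read from f; no explicit fclose needed on any branch below ...
}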
@ -171,7 +177,12 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(crl:
} }
// sync by audio stream // sync by audio stream
auto correctMs = (frameMs >= 0) ? Player::mixer()->getVideoCorrectedTime(_audioMsgId, frameMs, systemMs) : frameMs; auto correctMs = (frameMs >= 0)
? Player::mixer()->getExternalCorrectedTime(
_audioMsgId,
frameMs,
systemMs)
: frameMs;
if (!_frameRead) { if (!_frameRead) {
auto readResult = readNextFrame(); auto readResult = readNextFrame();
if (readResult != ReadResult::Success) { if (readResult != ReadResult::Success) {
@ -232,7 +243,7 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
} else { } else {
if ((_swsSize != toSize) || (_frame->format != -1 && _frame->format != _codecContext->pix_fmt) || !_swsContext) { if ((_swsSize != toSize) || (_frame->format != -1 && _frame->format != _codecContext->pix_fmt) || !_swsContext) {
_swsSize = toSize; _swsSize = toSize;
_swsContext = sws_getCachedContext(_swsContext, _frame->width, _frame->height, AVPixelFormat(_frame->format), toSize.width(), toSize.height(), AV_PIX_FMT_BGRA, 0, 0, 0, 0); _swsContext = sws_getCachedContext(_swsContext, _frame->width, _frame->height, AVPixelFormat(_frame->format), toSize.width(), toSize.height(), AV_PIX_FMT_BGRA, 0, nullptr, nullptr, nullptr);
} }
// AV_NUM_DATA_POINTERS defined in AVFrame struct // AV_NUM_DATA_POINTERS defined in AVFrame struct
uint8_t *toData[AV_NUM_DATA_POINTERS] = { to.bits(), nullptr }; uint8_t *toData[AV_NUM_DATA_POINTERS] = { to.bits(), nullptr };
@ -264,7 +275,8 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
} }
} }
av_frame_unref(_frame); Streaming::ClearFrameMemory(_frame.get());
return true; return true;
} }
@ -286,7 +298,7 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
return false; return false;
} }
_ioBuffer = (uchar*)av_malloc(AVBlockSize); _ioBuffer = (uchar*)av_malloc(AVBlockSize);
_ioContext = avio_alloc_context(_ioBuffer, AVBlockSize, 0, static_cast<void*>(this), &FFMpegReaderImplementation::_read, 0, &FFMpegReaderImplementation::_seek); _ioContext = avio_alloc_context(_ioBuffer, AVBlockSize, 0, static_cast<void*>(this), &FFMpegReaderImplementation::_read, nullptr, &FFMpegReaderImplementation::_seek);
_fmtContext = avformat_alloc_context(); _fmtContext = avformat_alloc_context();
if (!_fmtContext) { if (!_fmtContext) {
LOG(("Gif Error: Unable to avformat_alloc_context %1").arg(logData())); LOG(("Gif Error: Unable to avformat_alloc_context %1").arg(logData()));
@ -296,26 +308,26 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
int res = 0; int res = 0;
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 }; char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
if ((res = avformat_open_input(&_fmtContext, 0, 0, 0)) < 0) { if ((res = avformat_open_input(&_fmtContext, nullptr, nullptr, nullptr)) < 0) {
_ioBuffer = 0; _ioBuffer = nullptr;
LOG(("Gif Error: Unable to avformat_open_input %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res))); LOG(("Gif Error: Unable to avformat_open_input %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return false; return false;
} }
_opened = true; _opened = true;
if ((res = avformat_find_stream_info(_fmtContext, 0)) < 0) { if ((res = avformat_find_stream_info(_fmtContext, nullptr)) < 0) {
LOG(("Gif Error: Unable to avformat_find_stream_info %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res))); LOG(("Gif Error: Unable to avformat_find_stream_info %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return false; return false;
} }
_streamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0); _streamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
if (_streamId < 0) { if (_streamId < 0) {
LOG(("Gif Error: Unable to av_find_best_stream %1, error %2, %3").arg(logData()).arg(_streamId).arg(av_make_error_string(err, sizeof(err), _streamId))); LOG(("Gif Error: Unable to av_find_best_stream %1, error %2, %3").arg(logData()).arg(_streamId).arg(av_make_error_string(err, sizeof(err), _streamId)));
return false; return false;
} }
auto rotateTag = av_dict_get(_fmtContext->streams[_streamId]->metadata, "rotate", NULL, 0); auto rotateTag = av_dict_get(_fmtContext->streams[_streamId]->metadata, "rotate", nullptr, 0);
if (rotateTag && *rotateTag->value) { if (rotateTag && *rotateTag->value) {
auto stringRotateTag = QString::fromUtf8(rotateTag->value); auto stringRotateTag = QString::fromUtf8(rotateTag->value);
auto toIntSucceeded = false; auto toIntSucceeded = false;
@ -339,20 +351,20 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
const auto codec = avcodec_find_decoder(_codecContext->codec_id); const auto codec = avcodec_find_decoder(_codecContext->codec_id);
_audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0); _audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);
if (_mode == Mode::Inspecting) { if (_mode == Mode::Inspecting) {
_hasAudioStream = (_audioStreamId >= 0); _hasAudioStream = (_audioStreamId >= 0);
_audioStreamId = -1; _audioStreamId = -1;
} else if (_mode == Mode::Silent || !_audioMsgId.playId()) { } else if (_mode == Mode::Silent || !_audioMsgId.externalPlayId()) {
_audioStreamId = -1; _audioStreamId = -1;
} }
if ((res = avcodec_open2(_codecContext, codec, 0)) < 0) { if ((res = avcodec_open2(_codecContext, codec, nullptr)) < 0) {
LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res))); LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return false; return false;
} }
std::unique_ptr<VideoSoundData> soundData; std::unique_ptr<ExternalSoundData> soundData;
if (_audioStreamId >= 0) { if (_audioStreamId >= 0) {
auto audioContext = avcodec_alloc_context3(nullptr); auto audioContext = avcodec_alloc_context3(nullptr);
if (!audioContext) { if (!audioContext) {
@ -372,8 +384,8 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res))); LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
_audioStreamId = -1; _audioStreamId = -1;
} else { } else {
soundData = std::make_unique<VideoSoundData>(); soundData = std::make_unique<ExternalSoundData>();
soundData->context = audioContext; soundData->codec = Streaming::CodecPointer(audioContext);
soundData->frequency = _fmtContext->streams[_audioStreamId]->codecpar->sample_rate; soundData->frequency = _fmtContext->streams[_audioStreamId]->codecpar->sample_rate;
if (_fmtContext->streams[_audioStreamId]->duration == AV_NOPTS_VALUE) { if (_fmtContext->streams[_audioStreamId]->duration == AV_NOPTS_VALUE) {
soundData->length = (_fmtContext->duration * soundData->frequency) / AV_TIME_BASE; soundData->length = (_fmtContext->duration * soundData->frequency) / AV_TIME_BASE;
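When the audio stream reports no duration of its own, the length in samples falls back to the container duration, which FFmpeg stores in AV_TIME_BASE units (microseconds). A worked example with illustrative numbers:

// Worked example of the fallback above (illustrative values).
#include <cstdint>

constexpr int64_t durationUs = 5000000;  // container duration in AV_TIME_BASE (1e6) units
constexpr int64_t frequency = 48000;     // audio sample rate in Hz
constexpr int64_t lengthSamples = (durationUs * frequency) / 1000000;
static_assert(lengthSamples == 240000, "5 seconds of audio at 48 kHz");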
@ -393,10 +405,10 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
} }
} }
AVPacket packet; Streaming::Packet packet;
auto readResult = readPacket(&packet); auto readResult = readPacket(packet);
if (readResult == PacketResult::Ok && positionMs > 0) { if (readResult == PacketResult::Ok && positionMs > 0) {
positionMs = countPacketMs(&packet); positionMs = countPacketMs(packet);
} }
if (hasAudio()) { if (hasAudio()) {
@ -404,7 +416,7 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
} }
if (readResult == PacketResult::Ok) { if (readResult == PacketResult::Ok) {
processPacket(&packet); processPacket(std::move(packet));
} }
return true; return true;
@ -424,14 +436,14 @@ bool FFMpegReaderImplementation::inspectAt(crl::time &positionMs) {
_packetQueue.clear(); _packetQueue.clear();
AVPacket packet; Streaming::Packet packet;
auto readResult = readPacket(&packet); auto readResult = readPacket(packet);
if (readResult == PacketResult::Ok && positionMs > 0) { if (readResult == PacketResult::Ok && positionMs > 0) {
positionMs = countPacketMs(&packet); positionMs = countPacketMs(packet);
} }
if (readResult == PacketResult::Ok) { if (readResult == PacketResult::Ok) {
processPacket(&packet); processPacket(std::move(packet));
} }
return true; return true;
@ -455,12 +467,6 @@ QString FFMpegReaderImplementation::logData() const {
} }
FFMpegReaderImplementation::~FFMpegReaderImplementation() { FFMpegReaderImplementation::~FFMpegReaderImplementation() {
clearPacketQueue();
if (_frameRead) {
av_frame_unref(_frame);
_frameRead = false;
}
if (_codecContext) avcodec_free_context(&_codecContext); if (_codecContext) avcodec_free_context(&_codecContext);
if (_swsContext) sws_freeContext(_swsContext); if (_swsContext) sws_freeContext(_swsContext);
if (_opened) { if (_opened) {
@ -473,24 +479,18 @@ FFMpegReaderImplementation::~FFMpegReaderImplementation() {
av_freep(&_ioBuffer); av_freep(&_ioBuffer);
} }
if (_fmtContext) avformat_free_context(_fmtContext); if (_fmtContext) avformat_free_context(_fmtContext);
av_frame_free(&_frame);
} }
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(AVPacket *packet) { FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(Streaming::Packet &packet) {
av_init_packet(packet);
packet->data = nullptr;
packet->size = 0;
int res = 0; int res = 0;
if ((res = av_read_frame(_fmtContext, packet)) < 0) { if ((res = av_read_frame(_fmtContext, &packet.fields())) < 0) {
if (res == AVERROR_EOF) { if (res == AVERROR_EOF) {
if (_audioStreamId >= 0) { if (_audioStreamId >= 0) {
// queue terminating packet to audio player // queue terminating packet to audio player
auto drain = AVPacket(); Player::mixer()->feedFromExternal({
av_init_packet(&drain); _audioMsgId,
drain.data = nullptr; Streaming::Packet()
drain.size = 0; });
Player::mixer()->feedFromVideo({ &drain, _audioMsgId });
} }
return PacketResult::EndOfFile; return PacketResult::EndOfFile;
} }
@ -501,72 +501,44 @@ FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(
return PacketResult::Ok; return PacketResult::Ok;
} }
void FFMpegReaderImplementation::processPacket(AVPacket *packet) { void FFMpegReaderImplementation::processPacket(Streaming::Packet &&packet) {
auto videoPacket = (packet->stream_index == _streamId); const auto &native = packet.fields();
auto audioPacket = (_audioStreamId >= 0 && packet->stream_index == _audioStreamId); auto videoPacket = (native.stream_index == _streamId);
auto audioPacket = (_audioStreamId >= 0 && native.stream_index == _audioStreamId);
if (audioPacket || videoPacket) { if (audioPacket || videoPacket) {
if (videoPacket) { if (videoPacket) {
_lastReadVideoMs = countPacketMs(packet); _lastReadVideoMs = countPacketMs(packet);
_packetQueue.enqueue(FFMpeg::dataWrapFromPacket(*packet)); _packetQueue.push_back(std::move(packet));
} else if (audioPacket) { } else if (audioPacket) {
_lastReadAudioMs = countPacketMs(packet); _lastReadAudioMs = countPacketMs(packet);
// queue packet to audio player // queue packet to audio player
Player::mixer()->feedFromVideo({ packet, _audioMsgId }); Player::mixer()->feedFromExternal({
_audioMsgId,
std::move(packet)
});
} }
} else {
av_packet_unref(packet);
} }
} }
crl::time FFMpegReaderImplementation::countPacketMs(AVPacket *packet) const { crl::time FFMpegReaderImplementation::countPacketMs(
int64 packetPts = (packet->pts == AV_NOPTS_VALUE) ? packet->dts : packet->pts; const Streaming::Packet &packet) const {
crl::time packetMs = (packetPts * 1000LL * _fmtContext->streams[packet->stream_index]->time_base.num) / _fmtContext->streams[packet->stream_index]->time_base.den; const auto &native = packet.fields();
int64 packetPts = (native.pts == AV_NOPTS_VALUE) ? native.dts : native.pts;
crl::time packetMs = (packetPts * 1000LL * _fmtContext->streams[native.stream_index]->time_base.num) / _fmtContext->streams[native.stream_index]->time_base.den;
return packetMs; return packetMs;
} }
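countPacketMs() rescales the packet timestamp (pts, falling back to dts) from stream time_base units into milliseconds. A worked example with a common MPEG-TS time base:

// Worked example of the conversion above (illustrative values).
#include <cstdint>

constexpr int64_t pts = 270000;          // timestamp in stream time_base units
constexpr int64_t num = 1, den = 90000;  // time_base = 1/90000, typical for MPEG-TS
constexpr int64_t packetMs = pts * 1000LL * num / den;
static_assert(packetMs == 3000, "270000 ticks of 1/90000 s each start at 3 s");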
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readAndProcessPacket() { FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readAndProcessPacket() {
AVPacket packet; Streaming::Packet packet;
auto result = readPacket(&packet); auto result = readPacket(packet);
if (result == PacketResult::Ok) { if (result == PacketResult::Ok) {
processPacket(&packet); processPacket(std::move(packet));
} }
return result; return result;
} }
void FFMpegReaderImplementation::startPacket() {
if (!_packetStarted && !_packetQueue.isEmpty()) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, _packetQueue.head());
_packetStartedSize = packet.size;
_packetStartedData = packet.data;
_packetStarted = true;
}
}
void FFMpegReaderImplementation::finishPacket() {
if (_packetStarted) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, _packetQueue.head());
packet.size = _packetStartedSize;
packet.data = _packetStartedData;
_packetStarted = false;
av_packet_unref(&packet);
_packetQueue.dequeue();
}
}
void FFMpegReaderImplementation::clearPacketQueue() {
finishPacket();
auto packets = base::take(_packetQueue);
for (auto &packetData : packets) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
av_packet_unref(&packet);
}
}
int FFMpegReaderImplementation::_read(void *opaque, uint8_t *buf, int buf_size) { int FFMpegReaderImplementation::_read(void *opaque, uint8_t *buf, int buf_size) {
FFMpegReaderImplementation *l = reinterpret_cast<FFMpegReaderImplementation*>(opaque); FFMpegReaderImplementation *l = reinterpret_cast<FFMpegReaderImplementation*>(opaque);
return int(l->_device->read((char*)(buf), buf_size)); return int(l->_device->read((char*)(buf), buf_size));

View File

@ -15,6 +15,7 @@ extern "C" {
#include "media/clip/media_clip_implementation.h" #include "media/clip/media_clip_implementation.h"
#include "media/audio/media_child_ffmpeg_loader.h" #include "media/audio/media_child_ffmpeg_loader.h"
#include "media/streaming/media_streaming_utility.h"
namespace Media { namespace Media {
namespace Clip { namespace Clip {
@ -54,9 +55,9 @@ private:
EndOfFile, EndOfFile,
Error, Error,
}; };
PacketResult readPacket(AVPacket *packet); PacketResult readPacket(Streaming::Packet &packet);
void processPacket(AVPacket *packet); void processPacket(Streaming::Packet &&packet);
crl::time countPacketMs(AVPacket *packet) const; crl::time countPacketMs(const Streaming::Packet &packet) const;
PacketResult readAndProcessPacket(); PacketResult readAndProcessPacket();
enum class Rotation { enum class Rotation {
@ -70,10 +71,6 @@ private:
return (_rotation == Rotation::Degrees90) || (_rotation == Rotation::Degrees270); return (_rotation == Rotation::Degrees90) || (_rotation == Rotation::Degrees270);
} }
void startPacket();
void finishPacket();
void clearPacketQueue();
static int _read(void *opaque, uint8_t *buf, int buf_size); static int _read(void *opaque, uint8_t *buf, int buf_size);
static int64_t _seek(void *opaque, int64_t offset, int whence); static int64_t _seek(void *opaque, int64_t offset, int whence);
@ -86,7 +83,7 @@ private:
AVFormatContext *_fmtContext = nullptr; AVFormatContext *_fmtContext = nullptr;
AVCodecContext *_codecContext = nullptr; AVCodecContext *_codecContext = nullptr;
int _streamId = 0; int _streamId = 0;
AVFrame *_frame = nullptr; Streaming::FramePointer _frame;
bool _opened = false; bool _opened = false;
bool _hadFrame = false; bool _hadFrame = false;
bool _frameRead = false; bool _frameRead = false;
@ -98,10 +95,7 @@ private:
crl::time _lastReadVideoMs = 0; crl::time _lastReadVideoMs = 0;
crl::time _lastReadAudioMs = 0; crl::time _lastReadAudioMs = 0;
QQueue<FFMpeg::AVPacketDataWrap> _packetQueue; std::deque<Streaming::Packet> _packetQueue;
int _packetStartedSize = 0;
uint8_t *_packetStartedData = nullptr;
bool _packetStarted = false;
int _width = 0; int _width = 0;
int _height = 0; int _height = 0;

View File

@ -90,7 +90,10 @@ Reader::Reader(const QString &filepath, Callback &&callback, Mode mode, crl::tim
Reader::Reader(not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode, crl::time seekMs) Reader::Reader(not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode, crl::time seekMs)
: _callback(std::move(callback)) : _callback(std::move(callback))
, _mode(mode) , _mode(mode)
, _audioMsgId(document, msgId, (mode == Mode::Video) ? rand_value<uint32>() : 0) , _audioMsgId(
document,
msgId,
(mode == Mode::Video) ? AudioMsgId::CreateExternalPlayId() : 0)
, _seekPositionMs(seekMs) { , _seekPositionMs(seekMs) {
init(document->location(), document->data()); init(document->location(), document->data());
} }
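Replacing rand_value<uint32>() with AudioMsgId::CreateExternalPlayId() gives every external (video) playback a unique, non-zero id instead of a random one that could collide or come out as zero. A plausible sketch of such a generator; this is an assumption, not the actual AudioMsgId code:

// Hypothetical sketch of a unique play-id generator (assumed shape only).
#include <atomic>
#include <cstdint>

uint64_t CreateExternalPlayId() {
	static std::atomic<uint64_t> LastId{0};
	return ++LastId; // starts at 1, so 0 keeps meaning "no external playback"
}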

View File

@ -1,393 +0,0 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/player/media_player_cover.h"
#include "data/data_document.h"
#include "ui/widgets/labels.h"
#include "ui/widgets/continuous_sliders.h"
#include "ui/widgets/buttons.h"
#include "media/audio/media_audio.h"
#include "media/view/media_view_playback_progress.h"
#include "media/player/media_player_button.h"
#include "media/player/media_player_instance.h"
#include "media/player/media_player_volume_controller.h"
#include "styles/style_media_player.h"
#include "styles/style_mediaview.h"
#include "layout.h"
namespace Media {
namespace Player {
using ButtonState = PlayButtonLayout::State;
class CoverWidget::PlayButton : public Ui::AbstractButton {
public:
PlayButton(QWidget *parent);
void setState(ButtonState state) {
_layout.setState(state);
}
void finishTransform() {
_layout.finishTransform();
}
protected:
void paintEvent(QPaintEvent *e) override;
private:
PlayButtonLayout _layout;
};
CoverWidget::PlayButton::PlayButton(QWidget *parent) : Ui::AbstractButton(parent)
, _layout(st::mediaPlayerPanelButton, [this] { update(); }) {
resize(st::mediaPlayerPanelButtonSize);
setCursor(style::cur_pointer);
}
void CoverWidget::PlayButton::paintEvent(QPaintEvent *e) {
Painter p(this);
p.translate(st::mediaPlayerPanelButtonPosition.x(), st::mediaPlayerPanelButtonPosition.y());
_layout.paint(p, st::mediaPlayerActiveFg);
}
CoverWidget::CoverWidget(QWidget *parent) : RpWidget(parent)
, _nameLabel(this, st::mediaPlayerName)
, _timeLabel(this, st::mediaPlayerTime)
, _close(this, st::mediaPlayerPanelClose)
, _playbackSlider(this, st::mediaPlayerPanelPlayback)
, _playbackProgress(std::make_unique<View::PlaybackProgress>())
, _playPause(this)
, _volumeToggle(this, st::mediaPlayerVolumeToggle)
, _volumeController(this)
, _pinPlayer(this, st::mediaPlayerPanelPinButton)
, _repeatTrack(this, st::mediaPlayerRepeatButton) {
setAttribute(Qt::WA_OpaquePaintEvent);
resize(width(), st::mediaPlayerCoverHeight);
_close->hide();
_nameLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
_timeLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
setMouseTracking(true);
_playbackProgress->setInLoadingStateChangedCallback([=](bool loading) {
_playbackSlider->setDisabled(loading);
});
_playbackProgress->setValueChangedCallback([=](float64 value) {
_playbackSlider->setValue(value);
});
_playbackSlider->setChangeProgressCallback([=](float64 value) {
_playbackProgress->setValue(value, false);
handleSeekProgress(value);
});
_playbackSlider->setChangeFinishedCallback([=](float64 value) {
_playbackProgress->setValue(value, false);
handleSeekFinished(value);
});
_playPause->setClickedCallback([=] {
instance()->playPauseCancelClicked(AudioMsgId::Type::Song);
});
updateRepeatTrackIcon();
_repeatTrack->setClickedCallback([=] {
instance()->toggleRepeat(AudioMsgId::Type::Song);
});
updateVolumeToggleIcon();
_volumeToggle->setClickedCallback([=]() {
Global::SetSongVolume((Global::SongVolume() > 0) ? 0. : Global::RememberedSongVolume());
mixer()->setSongVolume(Global::SongVolume());
Global::RefSongVolumeChanged().notify();
});
subscribe(Global::RefSongVolumeChanged(), [=] { updateVolumeToggleIcon(); });
subscribe(instance()->repeatChangedNotifier(), [=](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Song) {
updateRepeatTrackIcon();
}
});
subscribe(instance()->updatedNotifier(), [=](const TrackState &state) {
if (state.id.type() == AudioMsgId::Type::Song) {
handleSongUpdate(state);
}
});
subscribe(instance()->trackChangedNotifier(), [=](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Song) {
handleSongChange();
}
});
instance()->playlistChanges(
AudioMsgId::Type::Song
) | rpl::start_with_next([=] {
handlePlaylistUpdate();
}, lifetime());
handleSongChange();
handleSongUpdate(mixer()->currentState(AudioMsgId::Type::Song));
_playPause->finishTransform();
}
void CoverWidget::setPinCallback(ButtonCallback &&callback) {
_pinPlayer->setClickedCallback(std::move(callback));
}
void CoverWidget::setCloseCallback(ButtonCallback &&callback) {
_close->setClickedCallback(std::move(callback));
}
void CoverWidget::handleSeekProgress(float64 progress) {
if (!_lastDurationMs) return;
auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs);
if (_seekPositionMs != positionMs) {
_seekPositionMs = positionMs;
updateTimeLabel();
instance()->startSeeking(AudioMsgId::Type::Song);
}
}
void CoverWidget::handleSeekFinished(float64 progress) {
if (!_lastDurationMs) return;
auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs);
_seekPositionMs = -1;
auto type = AudioMsgId::Type::Song;
auto state = Media::Player::mixer()->currentState(type);
if (state.id && state.length && state.frequency) {
Media::Player::mixer()->seek(type, qRound(progress * state.length * 1000. / state.frequency));
}
instance()->stopSeeking(type);
}
void CoverWidget::resizeEvent(QResizeEvent *e) {
auto widthForName = width() - 2 * (st::mediaPlayerPanelPadding);
widthForName -= _timeLabel->width() + 2 * st::normalFont->spacew;
_nameLabel->resizeToWidth(widthForName);
updateLabelPositions();
_close->moveToRight(0, 0);
int skip = (st::mediaPlayerPanelPlayback.seekSize.width() / 2);
int length = (width() - 2 * st::mediaPlayerPanelPadding + st::mediaPlayerPanelPlayback.seekSize.width());
_playbackSlider->setGeometry(st::mediaPlayerPanelPadding - skip, st::mediaPlayerPanelPlaybackTop, length, 2 * st::mediaPlayerPanelPlaybackPadding + st::mediaPlayerPanelPlayback.width);
auto top = st::mediaPlayerPanelVolumeToggleTop;
auto right = st::mediaPlayerPanelPlayLeft;
_repeatTrack->moveToRight(right, top); right += _repeatTrack->width();
_pinPlayer->moveToRight(right, top); right += _pinPlayer->width() + st::mediaPlayerPanelVolumeSkip;
_volumeController->moveToRight(right, st::mediaPlayerPanelVolumeTop); right += _volumeController->width() + st::mediaPlayerPanelVolumeToggleSkip;
_volumeToggle->moveToRight(right, top);
updatePlayPrevNextPositions();
}
void CoverWidget::paintEvent(QPaintEvent *e) {
Painter p(this);
p.fillRect(e->rect(), st::windowBg);
}
void CoverWidget::mouseMoveEvent(QMouseEvent *e) {
auto closeAreaLeft = st::mediaPlayerPanelPadding + _nameLabel->width();
auto closeAreaHeight = _nameLabel->y() + _nameLabel->height();
auto closeArea = myrtlrect(closeAreaLeft, 0, width() - closeAreaLeft, closeAreaHeight);
auto closeVisible = closeArea.contains(e->pos());
setCloseVisible(closeVisible);
}
void CoverWidget::leaveEventHook(QEvent *e) {
setCloseVisible(false);
}
void CoverWidget::setCloseVisible(bool visible) {
if (visible == _close->isHidden()) {
_close->setVisible(visible);
_timeLabel->setVisible(!visible);
}
}
void CoverWidget::updatePlayPrevNextPositions() {
auto left = st::mediaPlayerPanelPlayLeft;
auto top = st::mediaPlayerPanelPlayTop;
if (_previousTrack) {
_previousTrack->moveToLeft(left, top); left += _previousTrack->width() + st::mediaPlayerPanelPlaySkip;
_playPause->moveToLeft(left, top); left += _playPause->width() + st::mediaPlayerPanelPlaySkip;
_nextTrack->moveToLeft(left, top);
} else {
_playPause->moveToLeft(left, top);
}
}
void CoverWidget::updateLabelPositions() {
_nameLabel->moveToLeft(st::mediaPlayerPanelPadding, st::mediaPlayerPanelNameTop - st::mediaPlayerName.style.font->ascent);
_timeLabel->moveToRight(st::mediaPlayerPanelPadding, st::mediaPlayerPanelNameTop - st::mediaPlayerTime.font->ascent);
}
void CoverWidget::updateRepeatTrackIcon() {
_repeatTrack->setIconOverride(instance()->repeatEnabled(AudioMsgId::Type::Song) ? nullptr : &st::mediaPlayerRepeatInactiveIcon);
}
void CoverWidget::handleSongUpdate(const TrackState &state) {
if (!state.id.audio() || !state.id.audio()->isAudioFile()) {
return;
}
if (state.id.audio()->loading()) {
_playbackProgress->updateLoadingState(state.id.audio()->progress());
} else {
_playbackProgress->updateState(state);
}
auto stopped = IsStoppedOrStopping(state.state);
auto showPause = ShowPauseIcon(state.state);
if (instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
auto buttonState = [audio = state.id.audio(), showPause] {
if (audio->loading()) {
return ButtonState::Cancel;
} else if (showPause) {
return ButtonState::Pause;
}
return ButtonState::Play;
};
_playPause->setState(buttonState());
updateTimeText(state);
}
void CoverWidget::updateTimeText(const TrackState &state) {
QString time;
qint64 position = 0, length = 0, display = 0;
auto frequency = state.frequency;
if (!IsStoppedOrStopping(state.state)) {
display = position = state.position;
length = state.length;
} else if (const auto songData = state.id.audio()->song()) {
length = state.length ? state.length : (songData->duration * frequency);
}
_lastDurationMs = (state.length * 1000LL) / frequency;
if (state.id.audio()->loading()) {
_time = QString::number(qRound(state.id.audio()->progress() * 100)) + '%';
_playbackSlider->setDisabled(true);
} else {
display = display / frequency;
_time = formatDurationText(display);
_playbackSlider->setDisabled(false);
}
if (_seekPositionMs < 0) {
updateTimeLabel();
}
}
void CoverWidget::updateTimeLabel() {
auto timeLabelWidth = _timeLabel->width();
if (_seekPositionMs >= 0) {
auto playAlready = _seekPositionMs / 1000LL;
_timeLabel->setText(formatDurationText(playAlready));
} else {
_timeLabel->setText(_time);
}
if (timeLabelWidth != _timeLabel->width()) {
_nameLabel->resizeToWidth(width() - 2 * (st::mediaPlayerPanelPadding) - _timeLabel->width() - st::normalFont->spacew);
updateLabelPositions();
}
}
void CoverWidget::handleSongChange() {
const auto current = instance()->current(AudioMsgId::Type::Song);
const auto document = current.audio();
if (!current || !document) {
return;
}
TextWithEntities textWithEntities;
const auto song = document ? document->song() : nullptr;
if (!song) {
textWithEntities.text = document->filename().isEmpty()
? qsl("Unknown Track")
: document->filename();
} else if (song->performer.isEmpty()) {
textWithEntities.text = song->title.isEmpty()
? (document->filename().isEmpty()
? qsl("Unknown Track")
: document->filename())
: song->title;
} else {
auto title = song->title.isEmpty()
? qsl("Unknown Track")
: TextUtilities::Clean(song->title);
textWithEntities.text = song->performer + QString::fromUtf8(" \xe2\x80\x93 ") + title;
textWithEntities.entities.append({ EntityInTextBold, 0, song->performer.size(), QString() });
}
_nameLabel->setMarkedText(textWithEntities);
handlePlaylistUpdate();
}
void CoverWidget::handlePlaylistUpdate() {
const auto type = AudioMsgId::Type::Song;
const auto previousEnabled = instance()->previousAvailable(type);
const auto nextEnabled = instance()->nextAvailable(type);
if (!previousEnabled && !nextEnabled) {
destroyPrevNextButtons();
} else {
createPrevNextButtons();
_previousTrack->setIconOverride(previousEnabled ? nullptr : &st::mediaPlayerPanelPreviousDisabledIcon);
_previousTrack->setCursor(previousEnabled ? style::cur_pointer : style::cur_default);
_nextTrack->setIconOverride(nextEnabled ? nullptr : &st::mediaPlayerPanelNextDisabledIcon);
_nextTrack->setCursor(nextEnabled ? style::cur_pointer : style::cur_default);
}
}
void CoverWidget::createPrevNextButtons() {
if (!_previousTrack) {
_previousTrack.create(this, st::mediaPlayerPanelPreviousButton);
_previousTrack->show();
_previousTrack->setClickedCallback([=]() {
instance()->previous();
});
_nextTrack.create(this, st::mediaPlayerPanelNextButton);
_nextTrack->show();
_nextTrack->setClickedCallback([=]() {
instance()->next();
});
updatePlayPrevNextPositions();
}
}
void CoverWidget::destroyPrevNextButtons() {
if (_previousTrack) {
_previousTrack.destroy();
_nextTrack.destroy();
updatePlayPrevNextPositions();
}
}
void CoverWidget::updateVolumeToggleIcon() {
auto icon = []() -> const style::icon * {
auto volume = Global::SongVolume();
if (volume > 0) {
if (volume < 1 / 3.) {
return &st::mediaPlayerVolumeIcon1;
} else if (volume < 2 / 3.) {
return &st::mediaPlayerVolumeIcon2;
}
return &st::mediaPlayerVolumeIcon3;
}
return nullptr;
};
_volumeToggle->setIconOverride(icon());
}
} // namespace Player
} // namespace Media

View File

@ -1,86 +0,0 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/rp_widget.h"
class AudioMsgId;
namespace Ui {
class FlatLabel;
class LabelSimple;
class IconButton;
class MediaSlider;
} // namespace Ui
namespace Media {
namespace View {
class PlaybackProgress;
} // namespace View
namespace Player {
class VolumeController;
struct TrackState;
class CoverWidget : public Ui::RpWidget, private base::Subscriber {
public:
CoverWidget(QWidget *parent);
using ButtonCallback = Fn<void()>;
void setPinCallback(ButtonCallback &&callback);
void setCloseCallback(ButtonCallback &&callback);
protected:
void resizeEvent(QResizeEvent *e) override;
void paintEvent(QPaintEvent *e) override;
void mouseMoveEvent(QMouseEvent *e) override;
void leaveEventHook(QEvent *e) override;
private:
void setCloseVisible(bool visible);
void handleSeekProgress(float64 progress);
void handleSeekFinished(float64 progress);
void updatePlayPrevNextPositions();
void updateLabelPositions();
void updateRepeatTrackIcon();
void createPrevNextButtons();
void destroyPrevNextButtons();
void updateVolumeToggleIcon();
void handleSongUpdate(const TrackState &state);
void handleSongChange();
void handlePlaylistUpdate();
void updateTimeText(const TrackState &state);
void updateTimeLabel();
crl::time _seekPositionMs = -1;
crl::time _lastDurationMs = 0;
QString _time;
class PlayButton;
object_ptr<Ui::FlatLabel> _nameLabel;
object_ptr<Ui::LabelSimple> _timeLabel;
object_ptr<Ui::IconButton> _close;
object_ptr<Ui::MediaSlider> _playbackSlider;
std::unique_ptr<View::PlaybackProgress> _playbackProgress;
object_ptr<Ui::IconButton> _previousTrack = { nullptr };
object_ptr<PlayButton> _playPause;
object_ptr<Ui::IconButton> _nextTrack = { nullptr };
object_ptr<Ui::IconButton> _volumeToggle;
object_ptr<VolumeController> _volumeController;
object_ptr<Ui::IconButton> _pinPlayer;
object_ptr<Ui::IconButton> _repeatTrack;
};
} // namespace Player
} // namespace Media

View File

@ -11,6 +11,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_session.h" #include "data/data_session.h"
#include "media/audio/media_audio.h" #include "media/audio/media_audio.h"
#include "media/audio/media_audio_capture.h" #include "media/audio/media_audio_capture.h"
#include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_loader.h"
#include "calls/calls_instance.h" #include "calls/calls_instance.h"
#include "history/history.h" #include "history/history.h"
#include "history/history_item.h" #include "history/history_item.h"
@ -50,6 +52,34 @@ void finish(not_null<Audio::Instance*> instance) {
Audio::Finish(instance); Audio::Finish(instance);
} }
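// Holds the active streamed audio playback: the track id, the streaming
// player itself and the last Information snapshot received from it.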
struct Instance::Streamed {
Streamed(
AudioMsgId id,
not_null<::Data::Session*> owner,
std::unique_ptr<Streaming::Loader> loader);
AudioMsgId id;
Streaming::Player player;
Streaming::Information info;
};
Instance::Streamed::Streamed(
AudioMsgId id,
not_null<::Data::Session*> owner,
std::unique_ptr<Streaming::Loader> loader)
: id(id)
, player(owner, std::move(loader)) {
}
Instance::Data::Data(AudioMsgId::Type type, SharedMediaType overview)
: type(type)
, overview(overview) {
}
Instance::Data::Data(Data &&other) = default;
Instance::Data &Instance::Data::operator=(Data &&other) = default;
Instance::Data::~Data() = default;
Instance::Instance() Instance::Instance()
: _songData(AudioMsgId::Type::Song, SharedMediaType::MusicFile) : _songData(AudioMsgId::Type::Song, SharedMediaType::MusicFile)
, _voiceData(AudioMsgId::Type::Voice, SharedMediaType::RoundVoiceFile) { , _voiceData(AudioMsgId::Type::Voice, SharedMediaType::RoundVoiceFile) {
@ -69,8 +99,6 @@ Instance::Instance()
resumeOnCall(AudioMsgId::Type::Song); resumeOnCall(AudioMsgId::Type::Song);
} }
}); });
} else {
handleLogout();
} }
}; };
subscribe( subscribe(
@ -81,10 +109,12 @@ Instance::Instance()
setupShortcuts(); setupShortcuts();
} }
Instance::~Instance() = default;
AudioMsgId::Type Instance::getActiveType() const { AudioMsgId::Type Instance::getActiveType() const {
auto voiceData = getData(AudioMsgId::Type::Voice); auto voiceData = getData(AudioMsgId::Type::Voice);
if (voiceData->current) { if (voiceData->current) {
auto state = mixer()->currentState(voiceData->type); const auto state = getState(voiceData->type);
if (voiceData->current == state.id && !IsStoppedOrStopping(state.state)) { if (voiceData->current == state.id && !IsStoppedOrStopping(state.state)) {
return voiceData->type; return voiceData->type;
} }
@ -99,26 +129,40 @@ void Instance::handleSongUpdate(const AudioMsgId &audioId) {
} }
void Instance::setCurrent(const AudioMsgId &audioId) { void Instance::setCurrent(const AudioMsgId &audioId) {
if (auto data = getData(audioId.type())) { if (const auto data = getData(audioId.type())) {
if (data->current != audioId) { if (data->current == audioId) {
data->current = audioId; return;
data->isPlaying = false;
auto history = data->history;
auto migrated = data->migrated;
auto item = data->current
? App::histItemById(data->current.contextId())
: nullptr;
if (item) {
data->history = item->history()->migrateToOrMe();
data->migrated = data->history->migrateFrom();
} else {
data->history = nullptr;
data->migrated = nullptr;
}
_trackChangedNotifier.notify(data->type, true);
refreshPlaylist(data);
} }
const auto trackChanged = (data->current.audio() != audioId.audio())
|| (data->current.contextId() != audioId.contextId());
data->current = audioId;
if (!trackChanged) {
return;
}
const auto streamedId = data->streamed
? data->streamed->id
: AudioMsgId();
if (streamedId.audio() != audioId.audio()
|| streamedId.contextId() != audioId.contextId()) {
data->streamed = nullptr;
}
data->current = audioId;
data->isPlaying = false;
auto history = data->history;
auto migrated = data->migrated;
auto item = data->current
? App::histItemById(data->current.contextId())
: nullptr;
if (item) {
data->history = item->history()->migrateToOrMe();
data->migrated = data->history->migrateFrom();
} else {
data->history = nullptr;
data->migrated = nullptr;
}
_trackChangedNotifier.notify(data->type, true);
refreshPlaylist(data);
} }
} }
@ -241,12 +285,6 @@ bool Instance::moveInPlaylist(
|| document->isVoiceMessage() || document->isVoiceMessage()
|| document->isVideoMessage()) { || document->isVideoMessage()) {
play(AudioMsgId(document, item->fullId())); play(AudioMsgId(document, item->fullId()));
} else {
//DocumentOpenClickHandler::Open(
// item->fullId(),
// document,
// item,
// ActionOnLoadPlayInline);
} }
return true; return true;
} }
@ -284,19 +322,20 @@ Instance *instance() {
} }
void Instance::play(AudioMsgId::Type type) { void Instance::play(AudioMsgId::Type type) {
auto state = mixer()->currentState(type); if (const auto data = getData(type)) {
if (state.id) { const auto state = getState(type);
if (IsStopped(state.state)) { if (state.id) {
play(state.id); if (IsStopped(state.state)) {
} else { play(state.id);
mixer()->resume(state.id); } else if (data->streamed) {
} data->streamed->player.resume();
} else if (auto data = getData(type)) { emitUpdate(type);
if (data->current) { } else {
mixer()->resume(state.id);
}
} else if (data->current) {
play(data->current); play(data->current);
} }
}
if (const auto data = getData(type)) {
data->resumeOnCallEnd = false; data->resumeOnCallEnd = false;
} }
} }
@ -306,7 +345,13 @@ void Instance::play(const AudioMsgId &audioId) {
if (!audioId || !document) { if (!audioId || !document) {
return; return;
} }
if (document->isAudioFile() || document->isVoiceMessage()) { if (document->isAudioFile()) {
auto loader = document->createStreamingLoader(audioId.contextId());
if (!loader) {
return;
}
playStreamed(audioId, std::move(loader));
} else if (document->isVoiceMessage()) {
mixer()->play(audioId); mixer()->play(audioId);
setCurrent(audioId); setCurrent(audioId);
if (document->loading()) { if (document->loading()) {
@ -322,45 +367,115 @@ void Instance::play(const AudioMsgId &audioId) {
} }
} }
void Instance::playPause(const AudioMsgId &audioId) {
const auto now = current(audioId.type());
if (now.audio() == audioId.audio()
&& now.contextId() == audioId.contextId()) {
playPause(audioId.type());
} else {
play(audioId);
}
}
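// Creates a streaming player for the given track, subscribes to its updates
// and errors, and starts audio-only playback.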
void Instance::playStreamed(
const AudioMsgId &audioId,
std::unique_ptr<Streaming::Loader> loader) {
Expects(audioId.audio() != nullptr);
const auto data = getData(audioId.type());
Assert(data != nullptr);
data->streamed = std::make_unique<Streamed>(
audioId,
&audioId.audio()->owner(),
std::move(loader));
data->streamed->player.updates(
) | rpl::start_with_next_error([=](Streaming::Update &&update) {
handleStreamingUpdate(data, std::move(update));
}, [=](Streaming::Error &&error) {
handleStreamingError(data, std::move(error));
}, data->streamed->player.lifetime());
data->streamed->player.play(streamingOptions(audioId));
emitUpdate(audioId.type());
}
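// Audio-only playback options for a streamed track, optionally starting
// from a non-zero position (used when finishing a seek).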
Streaming::PlaybackOptions Instance::streamingOptions(
const AudioMsgId &audioId,
crl::time position) {
auto result = Streaming::PlaybackOptions();
result.mode = Streaming::Mode::Audio;
result.audioId = audioId;
result.position = position;
return result;
}
void Instance::pause(AudioMsgId::Type type) { void Instance::pause(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type); if (const auto data = getData(type)) {
if (state.id) { if (data->streamed) {
mixer()->pause(state.id); data->streamed->player.pause();
emitUpdate(type);
} else {
const auto state = getState(type);
if (state.id) {
mixer()->pause(state.id);
}
}
} }
} }
void Instance::stop(AudioMsgId::Type type) { void Instance::stop(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type);
if (state.id) {
mixer()->stop(state.id);
}
if (const auto data = getData(type)) { if (const auto data = getData(type)) {
if (data->streamed) {
data->streamed = nullptr;
} else {
const auto state = getState(type);
if (state.id) {
mixer()->stop(state.id);
}
}
data->resumeOnCallEnd = false; data->resumeOnCallEnd = false;
} }
} }
void Instance::playPause(AudioMsgId::Type type) { void Instance::playPause(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type);
if (state.id) {
if (IsStopped(state.state)) {
play(state.id);
} else if (IsPaused(state.state) || state.state == State::Pausing) {
mixer()->resume(state.id);
} else {
mixer()->pause(state.id);
}
} else if (auto data = getData(type)) {
if (data->current) {
play(data->current);
}
}
if (const auto data = getData(type)) { if (const auto data = getData(type)) {
if (data->streamed) {
if (data->streamed->player.finished()) {
auto options = Streaming::PlaybackOptions();
options.mode = Streaming::Mode::Audio;
options.audioId = data->streamed->id;
data->streamed->player.play(options);
} else if (data->streamed->player.paused()) {
data->streamed->player.resume();
} else {
data->streamed->player.pause();
}
emitUpdate(type);
} else {
const auto state = getState(type);
if (state.id) {
if (IsStopped(state.state)) {
play(state.id);
} else if (IsPaused(state.state) || state.state == State::Pausing) {
mixer()->resume(state.id);
} else {
mixer()->pause(state.id);
}
} else if (auto data = getData(type)) {
if (data->current) {
play(data->current);
}
}
}
data->resumeOnCallEnd = false; data->resumeOnCallEnd = false;
} }
} }
void Instance::pauseOnCall(AudioMsgId::Type type) { void Instance::pauseOnCall(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type); const auto state = getState(type);
if (!state.id if (!state.id
|| IsStopped(state.state) || IsStopped(state.state)
|| IsPaused(state.state) || IsPaused(state.state)
@ -401,11 +516,15 @@ void Instance::playPauseCancelClicked(AudioMsgId::Type type) {
return; return;
} }
auto state = mixer()->currentState(type); const auto data = getData(type);
auto stopped = IsStoppedOrStopping(state.state); if (!data) {
auto showPause = ShowPauseIcon(state.state); return;
auto audio = state.id.audio(); }
if (audio && audio->loading()) { const auto state = getState(type);
const auto stopped = IsStoppedOrStopping(state.state);
const auto showPause = ShowPauseIcon(state.state);
const auto audio = state.id.audio();
if (audio && audio->loading() && !data->streamed) {
audio->cancel(); audio->cancel();
} else if (showPause) { } else if (showPause) {
pause(type); pause(type);
@ -419,36 +538,69 @@ void Instance::startSeeking(AudioMsgId::Type type) {
data->seeking = data->current; data->seeking = data->current;
} }
pause(type); pause(type);
emitUpdate(type, [](const AudioMsgId &playing) { return true; }); emitUpdate(type);
} }
void Instance::stopSeeking(AudioMsgId::Type type) { void Instance::finishSeeking(AudioMsgId::Type type, float64 progress) {
if (auto data = getData(type)) { if (const auto data = getData(type)) {
if (data->streamed) {
const auto duration = data->streamed->info.audio.state.duration;
if (duration != kTimeUnknown) {
const auto position = crl::time(std::round(
std::clamp(progress, 0., 1.) * duration));
data->streamed->player.play(streamingOptions(
data->streamed->id,
position));
emitUpdate(type);
}
} else {
const auto state = getState(type);
if (state.id && state.length && state.frequency) {
mixer()->seek(type, qRound(progress * state.length * 1000. / state.frequency));
}
}
}
cancelSeeking(type);
}
void Instance::cancelSeeking(AudioMsgId::Type type) {
if (const auto data = getData(type)) {
data->seeking = AudioMsgId(); data->seeking = AudioMsgId();
} }
emitUpdate(type, [](const AudioMsgId &playing) { return true; }); emitUpdate(type);
} }
void Instance::documentLoadProgress(DocumentData *document) { void Instance::documentLoadProgress(DocumentData *document) {
const auto type = document->isAudioFile() const auto type = document->isAudioFile()
? AudioMsgId::Type::Song ? AudioMsgId::Type::Song
: AudioMsgId::Type::Voice; : AudioMsgId::Type::Voice;
emitUpdate(type, [document](const AudioMsgId &audioId) { emitUpdate(type, [&](const AudioMsgId &audioId) {
return (audioId.audio() == document); return (audioId.audio() == document);
}); });
} }
void Instance::emitUpdate(AudioMsgId::Type type) {
emitUpdate(type, [](const AudioMsgId &playing) { return true; });
}
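// Reports the streaming player state when a streamed track is active,
// otherwise falls back to the mixer state.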
TrackState Instance::getState(AudioMsgId::Type type) const {
if (const auto data = getData(type)) {
if (data->streamed) {
return data->streamed->player.prepareLegacyState();
}
}
return mixer()->currentState(type);
}
template <typename CheckCallback> template <typename CheckCallback>
void Instance::emitUpdate(AudioMsgId::Type type, CheckCallback check) { void Instance::emitUpdate(AudioMsgId::Type type, CheckCallback check) {
auto state = mixer()->currentState(type); if (const auto data = getData(type)) {
if (!state.id || !check(state.id)) { const auto state = getState(type);
return; if (!state.id || !check(state.id)) {
} return;
}
setCurrent(state.id); setCurrent(state.id);
_updatedNotifier.notify(state, true); _updatedNotifier.notify(state, true);
if (auto data = getData(type)) {
if (data->isPlaying && state.state == State::StoppedAtEnd) { if (data->isPlaying && state.state == State::StoppedAtEnd) {
if (data->repeatEnabled) { if (data->repeatEnabled) {
play(data->current); play(data->current);
@ -467,25 +619,25 @@ void Instance::emitUpdate(AudioMsgId::Type type, CheckCallback check) {
} }
void Instance::preloadNext(not_null<Data*> data) { void Instance::preloadNext(not_null<Data*> data) {
if (!data->current || !data->playlistSlice || !data->playlistIndex) { //if (!data->current || !data->playlistSlice || !data->playlistIndex) {
return; // return;
} //}
const auto nextIndex = *data->playlistIndex + 1; //const auto nextIndex = *data->playlistIndex + 1;
if (const auto item = itemByIndex(data, nextIndex)) { //if (const auto item = itemByIndex(data, nextIndex)) {
if (const auto media = item->media()) { // if (const auto media = item->media()) {
if (const auto document = media->document()) { // if (const auto document = media->document()) {
const auto isLoaded = document->loaded( // const auto isLoaded = document->loaded(
DocumentData::FilePathResolveSaveFromDataSilent); // DocumentData::FilePathResolveSaveFromDataSilent);
if (!isLoaded) { // if (!isLoaded) {
DocumentOpenClickHandler::Open( // DocumentOpenClickHandler::Open(
item->fullId(), // item->fullId(),
document, // document,
item, // item,
ActionOnLoadNone); // ActionOnLoadNone);
} // }
} // }
} // }
} //}
} }
void Instance::handleLogout() { void Instance::handleLogout() {
@ -495,7 +647,6 @@ void Instance::handleLogout() {
}; };
reset(AudioMsgId::Type::Voice); reset(AudioMsgId::Type::Voice);
reset(AudioMsgId::Type::Song); reset(AudioMsgId::Type::Song);
_usePanelPlayer.notify(false, true);
} }
void Instance::setupShortcuts() { void Instance::setupShortcuts() {
@ -529,5 +680,37 @@ void Instance::setupShortcuts() {
}, _lifetime); }, _lifetime);
} }
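// Applies streaming player updates to the cached audio state and re-emits
// them through the legacy update notifier.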
void Instance::handleStreamingUpdate(
not_null<Data*> data,
Streaming::Update &&update) {
using namespace Streaming;
update.data.match([&](Information &update) {
data->streamed->info = std::move(update);
emitUpdate(data->type);
}, [&](PreloadedVideo &update) {
}, [&](UpdateVideo &update) {
}, [&](PreloadedAudio &update) {
data->streamed->info.audio.state.receivedTill = update.till;
//emitUpdate(data->type, [](AudioMsgId) { return true; });
}, [&](UpdateAudio &update) {
data->streamed->info.audio.state.position = update.position;
emitUpdate(data->type);
}, [&](WaitingForData) {
}, [&](MutedByOther) {
}, [&](Finished) {
const auto finishTrack = [](Media::Streaming::TrackState &state) {
state.position = state.receivedTill = state.duration;
};
finishTrack(data->streamed->info.audio.state);
emitUpdate(data->type);
});
}
void Instance::handleStreamingError(
not_null<Data*> data,
Streaming::Error &&error) {
}
} // namespace Player } // namespace Player
} // namespace Media } // namespace Media

View File

@ -15,7 +15,15 @@ namespace Media {
namespace Audio { namespace Audio {
class Instance; class Instance;
} // namespace Audio } // namespace Audio
namespace Streaming {
class Loader;
struct PlaybackOptions;
struct Update;
struct Error;
} // namespace Streaming
} // namespace Media
namespace Media {
namespace Player { namespace Player {
void start(not_null<Audio::Instance*> instance); void start(not_null<Audio::Instance*> instance);
@ -59,6 +67,9 @@ public:
void playPauseCancelClicked(AudioMsgId::Type type); void playPauseCancelClicked(AudioMsgId::Type type);
void play(const AudioMsgId &audioId); void play(const AudioMsgId &audioId);
void playPause(const AudioMsgId &audioId);
TrackState getState(AudioMsgId::Type type) const;
AudioMsgId current(AudioMsgId::Type type) const { AudioMsgId current(AudioMsgId::Type type) const {
if (auto data = getData(type)) { if (auto data = getData(type)) {
return data->current; return data->current;
@ -86,7 +97,8 @@ public:
return false; return false;
} }
void startSeeking(AudioMsgId::Type type); void startSeeking(AudioMsgId::Type type);
void stopSeeking(AudioMsgId::Type type); void finishSeeking(AudioMsgId::Type type, float64 progress);
void cancelSeeking(AudioMsgId::Type type);
bool nextAvailable(AudioMsgId::Type type) const; bool nextAvailable(AudioMsgId::Type type) const;
bool previousAvailable(AudioMsgId::Type type) const; bool previousAvailable(AudioMsgId::Type type) const;
@ -99,12 +111,6 @@ public:
base::Observable<Switch> &switchToNextNotifier() { base::Observable<Switch> &switchToNextNotifier() {
return _switchToNextNotifier; return _switchToNextNotifier;
} }
base::Observable<bool> &usePanelPlayer() {
return _usePanelPlayer;
}
base::Observable<bool> &titleButtonOver() {
return _titleButtonOver;
}
base::Observable<bool> &playerWidgetOver() { base::Observable<bool> &playerWidgetOver() {
return _playerWidgetOver; return _playerWidgetOver;
} }
@ -125,21 +131,17 @@ public:
void documentLoadProgress(DocumentData *document); void documentLoadProgress(DocumentData *document);
void clear(); void handleLogout();
private: private:
Instance();
friend void start(not_null<Audio::Instance*> instance);
void setupShortcuts();
using SharedMediaType = Storage::SharedMediaType; using SharedMediaType = Storage::SharedMediaType;
using SliceKey = SparseIdsMergedSlice::Key; using SliceKey = SparseIdsMergedSlice::Key;
struct Streamed;
struct Data { struct Data {
Data(AudioMsgId::Type type, SharedMediaType overview) Data(AudioMsgId::Type type, SharedMediaType overview);
: type(type) Data(Data &&other);
, overview(overview) { Data &operator=(Data &&other);
} ~Data();
AudioMsgId::Type type; AudioMsgId::Type type;
Storage::SharedMediaType overview; Storage::SharedMediaType overview;
@ -156,8 +158,23 @@ private:
bool repeatEnabled = false; bool repeatEnabled = false;
bool isPlaying = false; bool isPlaying = false;
bool resumeOnCallEnd = false; bool resumeOnCallEnd = false;
std::unique_ptr<Streamed> streamed;
}; };
Instance();
~Instance();
friend void start(not_null<Audio::Instance*> instance);
friend void finish(not_null<Audio::Instance*> instance);
void setupShortcuts();
void playStreamed(
const AudioMsgId &audioId,
std::unique_ptr<Streaming::Loader> loader);
Streaming::PlaybackOptions streamingOptions(
const AudioMsgId &audioId,
crl::time position = 0);
// Observed notifications. // Observed notifications.
void handleSongUpdate(const AudioMsgId &audioId); void handleSongUpdate(const AudioMsgId &audioId);
@ -173,8 +190,15 @@ private:
bool moveInPlaylist(not_null<Data*> data, int delta, bool autonext); bool moveInPlaylist(not_null<Data*> data, int delta, bool autonext);
void preloadNext(not_null<Data*> data); void preloadNext(not_null<Data*> data);
HistoryItem *itemByIndex(not_null<Data*> data, int index); HistoryItem *itemByIndex(not_null<Data*> data, int index);
void handleLogout();
void handleStreamingUpdate(
not_null<Data*> data,
Streaming::Update &&update);
void handleStreamingError(
not_null<Data*> data,
Streaming::Error &&error);
void emitUpdate(AudioMsgId::Type type);
template <typename CheckCallback> template <typename CheckCallback>
void emitUpdate(AudioMsgId::Type type, CheckCallback check); void emitUpdate(AudioMsgId::Type type, CheckCallback check);
@ -200,8 +224,6 @@ private:
Data _voiceData; Data _voiceData;
base::Observable<Switch> _switchToNextNotifier; base::Observable<Switch> _switchToNextNotifier;
base::Observable<bool> _usePanelPlayer;
base::Observable<bool> _titleButtonOver;
base::Observable<bool> _playerWidgetOver; base::Observable<bool> _playerWidgetOver;
base::Observable<TrackState> _updatedNotifier; base::Observable<TrackState> _updatedNotifier;
base::Observable<AudioMsgId::Type> _tracksFinishedNotifier; base::Observable<AudioMsgId::Type> _tracksFinishedNotifier;

View File

@ -7,7 +7,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/ */
#include "media/player/media_player_panel.h" #include "media/player/media_player_panel.h"
#include "media/player/media_player_cover.h"
#include "media/player/media_player_instance.h" #include "media/player/media_player_instance.h"
#include "info/media/info_media_list_widget.h" #include "info/media/info_media_list_widget.h"
#include "history/history.h" #include "history/history.h"
@ -37,11 +36,9 @@ constexpr auto kDelayedHideTimeout = crl::time(3000);
Panel::Panel( Panel::Panel(
QWidget *parent, QWidget *parent,
not_null<Window::Controller*> window, not_null<Window::Controller*> window)
Layout layout)
: RpWidget(parent) : RpWidget(parent)
, AbstractController(window) , AbstractController(window)
, _layout(layout)
, _showTimer([this] { startShow(); }) , _showTimer([this] { startShow(); })
, _hideTimer([this] { startHideChecked(); }) , _hideTimer([this] { startHideChecked(); })
, _scroll(this, st::mediaPlayerScroll) { , _scroll(this, st::mediaPlayerScroll) {
@ -68,7 +65,7 @@ void Panel::resizeEvent(QResizeEvent *e) {
} }
void Panel::listHeightUpdated(int newHeight) { void Panel::listHeightUpdated(int newHeight) {
if (newHeight > emptyInnerHeight() || _cover) { if (newHeight > emptyInnerHeight()) {
updateSize(); updateSize();
} else { } else {
_hideTimer.callOnce(0); _hideTimer.callOnce(0);
@ -79,7 +76,7 @@ bool Panel::contentTooSmall() const {
const auto innerHeight = _scroll->widget() const auto innerHeight = _scroll->widget()
? _scroll->widget()->height() ? _scroll->widget()->height()
: emptyInnerHeight(); : emptyInnerHeight();
return (innerHeight <= emptyInnerHeight() && !_cover); return (innerHeight <= emptyInnerHeight());
} }
int Panel::emptyInnerHeight() const { int Panel::emptyInnerHeight() const {
@ -100,15 +97,6 @@ bool Panel::preventAutoHide() const {
void Panel::updateControlsGeometry() { void Panel::updateControlsGeometry() {
auto scrollTop = contentTop(); auto scrollTop = contentTop();
auto width = contentWidth(); auto width = contentWidth();
if (_cover) {
_cover->resizeToWidth(width);
_cover->moveToRight(contentRight(), scrollTop);
scrollTop += _cover->height();
if (_scrollShadow) {
_scrollShadow->resize(width, st::mediaPlayerScrollShadow.extend.bottom());
_scrollShadow->moveToRight(contentRight(), scrollTop);
}
}
auto scrollHeight = qMax(height() - scrollTop - contentBottom() - scrollMarginBottom(), 0); auto scrollHeight = qMax(height() - scrollTop - contentBottom() - scrollMarginBottom(), 0);
if (scrollHeight > 0) { if (scrollHeight > 0) {
_scroll->setGeometryToRight(contentRight(), scrollTop, width, scrollHeight); _scroll->setGeometryToRight(contentRight(), scrollTop, width, scrollHeight);
@ -135,9 +123,6 @@ void Panel::scrollPlaylistToCurrentTrack() {
void Panel::updateSize() { void Panel::updateSize() {
auto width = contentLeft() + st::mediaPlayerPanelWidth + contentRight(); auto width = contentLeft() + st::mediaPlayerPanelWidth + contentRight();
auto height = contentTop(); auto height = contentTop();
if (_cover) {
height += _cover->height();
}
auto listHeight = 0; auto listHeight = 0;
if (auto widget = _scroll->widget()) { if (auto widget = _scroll->widget()) {
listHeight = widget->height(); listHeight = widget->height();
@ -147,9 +132,6 @@ void Panel::updateSize() {
height += scrollHeight + contentBottom(); height += scrollHeight + contentBottom();
resize(width, height); resize(width, height);
_scroll->setVisible(scrollVisible); _scroll->setVisible(scrollVisible);
if (_scrollShadow) {
_scrollShadow->setVisible(scrollVisible);
}
} }
void Panel::paintEvent(QPaintEvent *e) { void Panel::paintEvent(QPaintEvent *e) {
@ -173,10 +155,10 @@ void Panel::paintEvent(QPaintEvent *e) {
// draw shadow // draw shadow
auto shadowedRect = myrtlrect(contentLeft(), contentTop(), contentWidth(), contentHeight()); auto shadowedRect = myrtlrect(contentLeft(), contentTop(), contentWidth(), contentHeight());
auto shadowedSides = (rtl() ? RectPart::Right : RectPart::Left) | RectPart::Bottom; auto shadowedSides = (rtl() ? RectPart::Right : RectPart::Left)
if (_layout != Layout::Full) { | RectPart::Bottom
shadowedSides |= (rtl() ? RectPart::Left : RectPart::Right) | RectPart::Top; | (rtl() ? RectPart::Left : RectPart::Right)
} | RectPart::Top;
Ui::Shadow::paint(p, shadowedRect, width(), st::defaultRoundShadow, shadowedSides); Ui::Shadow::paint(p, shadowedRect, width(), st::defaultRoundShadow, shadowedSides);
auto parts = RectPart::Full; auto parts = RectPart::Full;
App::roundRect(p, shadowedRect, st::menuBg, MenuCorners, nullptr, parts); App::roundRect(p, shadowedRect, st::menuBg, MenuCorners, nullptr, parts);
@ -228,13 +210,6 @@ void Panel::hideFromOther() {
void Panel::ensureCreated() { void Panel::ensureCreated() {
if (_scroll->widget()) return; if (_scroll->widget()) return;
if (_layout == Layout::Full) {
_cover.create(this);
setPinCallback(std::move(_pinCallback));
setCloseCallback(std::move(_closeCallback));
_scrollShadow.create(this, st::mediaPlayerScrollShadow, RectPart::Bottom);
}
_refreshListLifetime = instance()->playlistChanges( _refreshListLifetime = instance()->playlistChanges(
AudioMsgId::Type::Song AudioMsgId::Type::Song
) | rpl::start_with_next([this] { ) | rpl::start_with_next([this] {
@ -328,7 +303,6 @@ void Panel::refreshList() {
void Panel::performDestroy() { void Panel::performDestroy() {
if (!_scroll->widget()) return; if (!_scroll->widget()) return;
_cover.destroy();
_scroll->takeWidget<QWidget>().destroy(); _scroll->takeWidget<QWidget>().destroy();
_listPeer = _listMigratedPeer = nullptr; _listPeer = _listMigratedPeer = nullptr;
_refreshListLifetime.destroy(); _refreshListLifetime.destroy();
@ -344,20 +318,6 @@ void Panel::performDestroy() {
} }
} }
void Panel::setPinCallback(ButtonCallback &&callback) {
_pinCallback = std::move(callback);
if (_cover) {
_cover->setPinCallback(ButtonCallback(_pinCallback));
}
}
void Panel::setCloseCallback(ButtonCallback &&callback) {
_closeCallback = std::move(callback);
if (_cover) {
_cover->setCloseCallback(ButtonCallback(_closeCallback));
}
}
Info::Key Panel::key() const { Info::Key Panel::key() const {
return Info::Key(_listPeer); return Info::Key(_listPeer);
} }
@ -444,11 +404,11 @@ int Panel::contentLeft() const {
} }
int Panel::contentTop() const { int Panel::contentTop() const {
return (_layout == Layout::Full) ? 0 : st::mediaPlayerPanelMarginLeft; return st::mediaPlayerPanelMarginLeft;
} }
int Panel::contentRight() const { int Panel::contentRight() const {
return (_layout == Layout::Full) ? 0 : st::mediaPlayerPanelMarginLeft; return st::mediaPlayerPanelMarginLeft;
} }
int Panel::contentBottom() const { int Panel::contentBottom() const {

View File

@ -27,14 +27,9 @@ class CoverWidget;
class Panel : public Ui::RpWidget, private Info::AbstractController { class Panel : public Ui::RpWidget, private Info::AbstractController {
public: public:
enum class Layout {
Full,
OnlyPlaylist,
};
Panel( Panel(
QWidget *parent, QWidget *parent,
not_null<Window::Controller*> controller, not_null<Window::Controller*> controller);
Layout layout);
bool overlaps(const QRect &globalRect); bool overlaps(const QRect &globalRect);
@ -43,10 +38,6 @@ public:
void showFromOther(); void showFromOther();
void hideFromOther(); void hideFromOther();
using ButtonCallback = Fn<void()>;
void setPinCallback(ButtonCallback &&callback);
void setCloseCallback(ButtonCallback &&callback);
int bestPositionFor(int left) const; int bestPositionFor(int left) const;
protected: protected:
@ -96,7 +87,6 @@ private:
return static_cast<Info::AbstractController*>(this); return static_cast<Info::AbstractController*>(this);
} }
Layout _layout;
bool _hiding = false; bool _hiding = false;
QPixmap _cache; QPixmap _cache;
@ -107,10 +97,7 @@ private:
base::Timer _showTimer; base::Timer _showTimer;
base::Timer _hideTimer; base::Timer _hideTimer;
ButtonCallback _pinCallback, _closeCallback;
object_ptr<CoverWidget> _cover = { nullptr };
object_ptr<Ui::ScrollArea> _scroll; object_ptr<Ui::ScrollArea> _scroll;
object_ptr<Ui::Shadow> _scrollShadow = { nullptr };
rpl::lifetime _refreshListLifetime; rpl::lifetime _refreshListLifetime;
PeerData *_listPeer = nullptr; PeerData *_listPeer = nullptr;

View File

@ -156,8 +156,8 @@ Widget::Widget(QWidget *parent) : RpWidget(parent)
subscribe(instance()->tracksFinishedNotifier(), [this](AudioMsgId::Type type) { subscribe(instance()->tracksFinishedNotifier(), [this](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Voice) { if (type == AudioMsgId::Type::Voice) {
_voiceIsActive = false; _voiceIsActive = false;
auto currentSong = instance()->current(AudioMsgId::Type::Song); const auto currentSong = instance()->current(AudioMsgId::Type::Song);
auto songState = mixer()->currentState(AudioMsgId::Type::Song); const auto songState = instance()->getState(AudioMsgId::Type::Song);
if (currentSong == songState.id && !IsStoppedOrStopping(songState.state)) { if (currentSong == songState.id && !IsStoppedOrStopping(songState.state)) {
setType(AudioMsgId::Type::Song); setType(AudioMsgId::Type::Song);
} }
@ -191,8 +191,8 @@ void Widget::setCloseCallback(Fn<void()> callback) {
void Widget::stopAndClose() { void Widget::stopAndClose() {
_voiceIsActive = false; _voiceIsActive = false;
if (_type == AudioMsgId::Type::Voice) { if (_type == AudioMsgId::Type::Voice) {
auto songData = instance()->current(AudioMsgId::Type::Song); const auto songData = instance()->current(AudioMsgId::Type::Song);
auto songState = mixer()->currentState(AudioMsgId::Type::Song); const auto songState = instance()->getState(AudioMsgId::Type::Song);
if (songData == songState.id && !IsStoppedOrStopping(songState.state)) { if (songData == songState.id && !IsStoppedOrStopping(songState.state)) {
instance()->stop(AudioMsgId::Type::Voice); instance()->stop(AudioMsgId::Type::Voice);
return; return;
@ -248,12 +248,7 @@ void Widget::handleSeekFinished(float64 progress) {
auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs); auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs);
_seekPositionMs = -1; _seekPositionMs = -1;
auto state = mixer()->currentState(_type); instance()->finishSeeking(_type, progress);
if (state.id && state.length && state.frequency) {
mixer()->seek(_type, qRound(progress * state.length * 1000. / state.frequency));
}
instance()->stopSeeking(_type);
} }
void Widget::resizeEvent(QResizeEvent *e) { void Widget::resizeEvent(QResizeEvent *e) {
@ -382,8 +377,8 @@ void Widget::updatePlaybackSpeedIcon() {
void Widget::checkForTypeChange() { void Widget::checkForTypeChange() {
auto hasActiveType = [](AudioMsgId::Type type) { auto hasActiveType = [](AudioMsgId::Type type) {
auto current = instance()->current(type); const auto current = instance()->current(type);
auto state = mixer()->currentState(type); const auto state = instance()->getState(type);
return (current == state.id && !IsStoppedOrStopping(state.state)); return (current == state.id && !IsStoppedOrStopping(state.state));
}; };
if (hasActiveType(AudioMsgId::Type::Voice)) { if (hasActiveType(AudioMsgId::Type::Voice)) {
@ -410,7 +405,7 @@ void Widget::setType(AudioMsgId::Type type) {
} }
updateLabelsGeometry(); updateLabelsGeometry();
handleSongChange(); handleSongChange();
handleSongUpdate(mixer()->currentState(_type)); handleSongUpdate(instance()->getState(_type));
updateOverLabelsState(_labelsOver); updateOverLabelsState(_labelsOver);
_playlistChangesLifetime = instance()->playlistChanges( _playlistChangesLifetime = instance()->playlistChanges(
_type _type

View File

@ -29,7 +29,7 @@ AudioTrack::AudioTrack(
, _playPosition(options.position) { , _playPosition(options.position) {
Expects(_ready != nullptr); Expects(_ready != nullptr);
Expects(_error != nullptr); Expects(_error != nullptr);
Expects(_audioId.playId() != 0); Expects(_audioId.externalPlayId() != 0);
} }
int AudioTrack::streamIndex() const { int AudioTrack::streamIndex() const {
@ -66,11 +66,7 @@ bool AudioTrack::tryReadFirstFrame(Packet &&packet) {
} }
if (const auto error = ReadNextFrame(_stream)) { if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) { if (error.code() == AVERROR_EOF) {
if (!_initialSkippingFrame) {
return false;
}
// Return the last valid frame if we seek too far. // Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
return processFirstFrame(); return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) { } else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
return false; return false;
@ -83,10 +79,6 @@ bool AudioTrack::tryReadFirstFrame(Packet &&packet) {
} else if (_startedPosition < _options.position) { } else if (_startedPosition < _options.position) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames. // Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position. // Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = MakeFramePointer();
}
return true; return true;
} else { } else {
return processFirstFrame(); return processFirstFrame();
@ -111,12 +103,13 @@ bool AudioTrack::fillStateFromFrame() {
void AudioTrack::mixerInit() { void AudioTrack::mixerInit() {
Expects(!initialized()); Expects(!initialized());
auto data = std::make_unique<VideoSoundData>(); auto data = std::make_unique<ExternalSoundData>();
data->frame = _stream.frame.release(); data->frame = std::move(_stream.frame);
data->context = _stream.codec.release(); data->codec = std::move(_stream.codec);
data->frequency = _stream.frequency; data->frequency = _stream.frequency;
data->length = (_stream.duration * data->frequency) / 1000LL; data->length = (_stream.duration * data->frequency) / 1000LL;
data->speed = _options.speed; data->speed = _options.speed;
Media::Player::mixer()->play( Media::Player::mixer()->play(
_audioId, _audioId,
std::move(data), std::move(data),
@ -136,15 +129,14 @@ void AudioTrack::callReady() {
} }
void AudioTrack::mixerEnqueue(Packet &&packet) { void AudioTrack::mixerEnqueue(Packet &&packet) {
Media::Player::mixer()->feedFromVideo({ Media::Player::mixer()->feedFromExternal({
&packet.fields(), _audioId,
_audioId std::move(packet)
}); });
packet.release();
} }
void AudioTrack::mixerForceToBuffer() { void AudioTrack::mixerForceToBuffer() {
Media::Player::mixer()->forceToBufferVideo(_audioId); Media::Player::mixer()->forceToBufferExternal(_audioId);
} }
void AudioTrack::pause(crl::time time) { void AudioTrack::pause(crl::time time) {
@ -161,7 +153,7 @@ void AudioTrack::resume(crl::time time) {
void AudioTrack::setSpeed(float64 speed) { void AudioTrack::setSpeed(float64 speed) {
_options.speed = speed; _options.speed = speed;
Media::Player::mixer()->setSpeedFromVideo(_audioId, speed); Media::Player::mixer()->setSpeedFromExternal(_audioId, speed);
} }
rpl::producer<> AudioTrack::waitingForData() const { rpl::producer<> AudioTrack::waitingForData() const {
@ -178,8 +170,8 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
if (id != _audioId) { if (id != _audioId) {
return; return;
} }
const auto type = AudioMsgId::Type::Video; const auto state = Media::Player::mixer()->currentState(
const auto state = Media::Player::mixer()->currentState(type); _audioId.type());
if (state.id != _audioId) { if (state.id != _audioId) {
// #TODO streaming later muted by other // #TODO streaming later muted by other
return; return;
@ -212,7 +204,8 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
} }
AudioTrack::~AudioTrack() { AudioTrack::~AudioTrack() {
if (_audioId.playId()) { if (_audioId.externalPlayId()) {
LOG(("mixer()->stop with %1").arg(_audioId.externalPlayId()));
Media::Player::mixer()->stop(_audioId); Media::Player::mixer()->stop(_audioId);
} }
} }

View File

@ -35,6 +35,7 @@ struct PlaybackOptions {
Mode mode = Mode::Both; Mode mode = Mode::Both;
crl::time position = 0; crl::time position = 0;
float64 speed = 1.; // Valid values between 0.5 and 2. float64 speed = 1.; // Valid values between 0.5 and 2.
AudioMsgId audioId;
bool syncVideoByAudio = true; bool syncVideoByAudio = true;
bool dropStaleFrames = true; bool dropStaleFrames = true;
}; };

View File

@ -12,6 +12,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_audio_track.h" #include "media/streaming/media_streaming_audio_track.h"
#include "media/streaming/media_streaming_video_track.h" #include "media/streaming/media_streaming_video_track.h"
#include "media/audio/media_audio.h" // for SupportsSpeedControl() #include "media/audio/media_audio.h" // for SupportsSpeedControl()
#include "data/data_document.h" // for DocumentData::duration()
namespace Media { namespace Media {
namespace Streaming { namespace Streaming {
@ -20,6 +21,7 @@ namespace {
constexpr auto kReceivedTillEnd = std::numeric_limits<crl::time>::max(); constexpr auto kReceivedTillEnd = std::numeric_limits<crl::time>::max();
constexpr auto kBufferFor = 3 * crl::time(1000); constexpr auto kBufferFor = 3 * crl::time(1000);
constexpr auto kLoadInAdvanceFor = 64 * crl::time(1000); constexpr auto kLoadInAdvanceFor = 64 * crl::time(1000);
constexpr auto kMsFrequency = 1000; // 1000 ms per second.
// If we played for 3 seconds and got stuck it looks like we're loading // If we played for 3 seconds and got stuck it looks like we're loading
// slower than we're playing, so load full file in that case. // slower than we're playing, so load full file in that case.
@ -132,7 +134,9 @@ void Player::trackReceivedTill(
} else { } else {
state.receivedTill = position; state.receivedTill = position;
} }
if (!_pauseReading && bothReceivedEnough(kLoadInAdvanceFor)) { if (!_pauseReading
&& bothReceivedEnough(kLoadInAdvanceFor)
&& !receivedTillEnd()) {
_pauseReading = true; _pauseReading = true;
} }
} }
@ -150,7 +154,8 @@ void Player::trackPlayedTill(
state.position = position; state.position = position;
_updates.fire({ PlaybackUpdate<Track>{ position } }); _updates.fire({ PlaybackUpdate<Track>{ position } });
} }
if (_pauseReading && !bothReceivedEnough(kLoadInAdvanceFor)) { if (_pauseReading
&& (!bothReceivedEnough(kLoadInAdvanceFor) || receivedTillEnd())) {
_pauseReading = false; _pauseReading = false;
_file->wake(); _file->wake();
++wakes; ++wakes;
@ -213,7 +218,14 @@ void Player::fileReady(Stream &&video, Stream &&audio) {
}; };
const auto mode = _options.mode; const auto mode = _options.mode;
if (audio.codec && (mode == Mode::Audio || mode == Mode::Both)) { if (audio.codec && (mode == Mode::Audio || mode == Mode::Both)) {
_audioId = AudioMsgId::ForVideo(); if (_options.audioId) {
_audioId = AudioMsgId(
_options.audioId.audio(),
_options.audioId.contextId(),
AudioMsgId::CreateExternalPlayId());
} else {
_audioId = AudioMsgId::ForVideo();
}
_audio = std::make_unique<AudioTrack>( _audio = std::make_unique<AudioTrack>(
_options, _options,
std::move(audio), std::move(audio),
@ -429,6 +441,11 @@ bool Player::bothReceivedEnough(crl::time amount) const {
&& (!_video || trackReceivedEnough(info.video.state, amount)); && (!_video || trackReceivedEnough(info.video.state, amount));
} }
bool Player::receivedTillEnd() const {
return (!_video || FullTrackReceived(_information.video.state))
&& (!_audio || FullTrackReceived(_information.audio.state));
}
void Player::checkResumeFromWaitingForData() { void Player::checkResumeFromWaitingForData() {
if (_pausedByWaitingForData && bothReceivedEnough(kBufferFor)) { if (_pausedByWaitingForData && bothReceivedEnough(kBufferFor)) {
_pausedByWaitingForData = false; _pausedByWaitingForData = false;
@ -446,8 +463,7 @@ void Player::start() {
_audio ? _audio->waitingForData() : rpl::never(), _audio ? _audio->waitingForData() : rpl::never(),
_video ? _video->waitingForData() : rpl::never() _video ? _video->waitingForData() : rpl::never()
) | rpl::filter([=] { ) | rpl::filter([=] {
return !FullTrackReceived(_information.video.state) return !receivedTillEnd();
|| !FullTrackReceived(_information.audio.state);
}) | rpl::start_with_next([=] { }) | rpl::start_with_next([=] {
_pausedByWaitingForData = true; _pausedByWaitingForData = true;
updatePausedState(); updatePausedState();
@ -566,6 +582,38 @@ QImage Player::frame(const FrameRequest &request) const {
return _video->frame(request); return _video->frame(request);
} }
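// Builds a legacy mixer-style TrackState from the streaming information;
// positions and lengths are reported in milliseconds, so the frequency is fixed at 1000.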
Media::Player::TrackState Player::prepareLegacyState() const {
using namespace Media::Player;
auto result = Media::Player::TrackState();
result.id = _audioId.externalPlayId() ? _audioId : _options.audioId;
result.state = finished()
? State::StoppedAtEnd
: paused()
? State::Paused
: State::Playing;
result.position = std::max(
_information.audio.state.position,
_information.video.state.position);
if (result.position == kTimeUnknown) {
result.position = _options.position;
}
result.length = std::max(
_information.audio.state.duration,
_information.video.state.duration);
if (result.length == kTimeUnknown && _options.audioId.audio()) {
const auto document = _options.audioId.audio();
const auto duration = document->song()
? document->song()->duration
: document->duration();
if (duration > 0) {
result.length = duration * crl::time(1000);
}
}
result.frequency = kMsFrequency;
return result;
}
rpl::lifetime &Player::lifetime() { rpl::lifetime &Player::lifetime() {
return _lifetime; return _lifetime;
} }

View File

@ -16,6 +16,12 @@ namespace Data {
class Session; class Session;
} // namespace Data } // namespace Data
namespace Media {
namespace Player {
struct TrackState;
} // namespace Player
} // namespace Media
namespace Media { namespace Media {
namespace Streaming { namespace Streaming {
@ -54,6 +60,8 @@ public:
[[nodiscard]] QImage frame(const FrameRequest &request) const; [[nodiscard]] QImage frame(const FrameRequest &request) const;
[[nodiscard]] Media::Player::TrackState prepareLegacyState() const;
[[nodiscard]] rpl::lifetime &lifetime(); [[nodiscard]] rpl::lifetime &lifetime();
~Player(); ~Player();
@ -95,6 +103,7 @@ private:
const TrackState &state, const TrackState &state,
crl::time amount) const; crl::time amount) const;
[[nodiscard]] bool bothReceivedEnough(crl::time amount) const; [[nodiscard]] bool bothReceivedEnough(crl::time amount) const;
[[nodiscard]] bool receivedTillEnd() const;
void checkResumeFromWaitingForData(); void checkResumeFromWaitingForData();
template <typename Track> template <typename Track>

View File

@ -379,9 +379,6 @@ auto Reader::Slices::fill(int offset, bytes::span buffer) -> FillResult {
if (cacheNotLoaded(sliceIndex) if (cacheNotLoaded(sliceIndex)
&& !(_data[sliceIndex].flags & Flag::LoadingFromCache)) { && !(_data[sliceIndex].flags & Flag::LoadingFromCache)) {
_data[sliceIndex].flags |= Flag::LoadingFromCache; _data[sliceIndex].flags |= Flag::LoadingFromCache;
if (sliceIndex == 23) {
int a = 0;
}
result.sliceNumbersFromCache.add(sliceIndex + 1); result.sliceNumbersFromCache.add(sliceIndex + 1);
} }
}; };

View File

@ -33,12 +33,6 @@ bool IsAlignedImage(const QImage &image) {
&& !(image.bytesPerLine() % kAlignImageBy); && !(image.bytesPerLine() % kAlignImageBy);
} }
void ClearFrameMemory(AVFrame *frame) {
if (frame && frame->data[0]) {
av_frame_unref(frame);
}
}
} // namespace } // namespace
bool GoodStorageForFrame(const QImage &storage, QSize size) { bool GoodStorageForFrame(const QImage &storage, QSize size) {
@ -112,6 +106,16 @@ FramePointer MakeFramePointer() {
return FramePointer(av_frame_alloc()); return FramePointer(av_frame_alloc());
} }
bool FrameHasData(AVFrame *frame) {
return (frame && frame->data[0] != nullptr);
}
void ClearFrameMemory(AVFrame *frame) {
if (FrameHasData(frame)) {
av_frame_unref(frame);
}
}
void FrameDeleter::operator()(AVFrame *value) { void FrameDeleter::operator()(AVFrame *value) {
av_frame_free(&value); av_frame_free(&value);
} }
@ -288,17 +292,20 @@ bool GoodForRequest(const QImage &image, const FrameRequest &request) {
&& (request.resize == image.size()); && (request.resize == image.size());
} }
QImage ConvertFrame(Stream &stream, QSize resize, QImage storage) { QImage ConvertFrame(
Expects(stream.frame != nullptr); Stream &stream,
AVFrame *frame,
QSize resize,
QImage storage) {
Expects(frame != nullptr);
const auto frame = stream.frame.get();
const auto frameSize = QSize(frame->width, frame->height); const auto frameSize = QSize(frame->width, frame->height);
if (frameSize.isEmpty()) { if (frameSize.isEmpty()) {
LOG(("Streaming Error: Bad frame size %1,%2" LOG(("Streaming Error: Bad frame size %1,%2"
).arg(frameSize.width() ).arg(frameSize.width()
).arg(frameSize.height())); ).arg(frameSize.height()));
return QImage(); return QImage();
} else if (!frame->data[0]) { } else if (!FrameHasData(frame)) {
LOG(("Streaming Error: Bad frame data.")); LOG(("Streaming Error: Bad frame data."));
return QImage(); return QImage();
} }
@ -359,6 +366,8 @@ QImage ConvertFrame(Stream &stream, QSize resize, QImage storage) {
return QImage(); return QImage();
} }
} }
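// The pixel data has been copied into |storage|, so the decoded frame can be unreferenced.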
ClearFrameMemory(frame);
return storage; return storage;
} }

View File

@ -119,13 +119,15 @@ struct CodecDeleter {
void operator()(AVCodecContext *value); void operator()(AVCodecContext *value);
}; };
using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>; using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>;
CodecPointer MakeCodecPointer(not_null<AVStream*> stream); [[nodiscard]] CodecPointer MakeCodecPointer(not_null<AVStream*> stream);
struct FrameDeleter { struct FrameDeleter {
void operator()(AVFrame *value); void operator()(AVFrame *value);
}; };
using FramePointer = std::unique_ptr<AVFrame, FrameDeleter>; using FramePointer = std::unique_ptr<AVFrame, FrameDeleter>;
FramePointer MakeFramePointer(); [[nodiscard]] FramePointer MakeFramePointer();
[[nodiscard]] bool FrameHasData(AVFrame *frame);
void ClearFrameMemory(AVFrame *frame);
struct SwsContextDeleter { struct SwsContextDeleter {
QSize resize; QSize resize;
@ -135,7 +137,7 @@ struct SwsContextDeleter {
void operator()(SwsContext *value); void operator()(SwsContext *value);
}; };
using SwsContextPointer = std::unique_ptr<SwsContext, SwsContextDeleter>; using SwsContextPointer = std::unique_ptr<SwsContext, SwsContextDeleter>;
SwsContextPointer MakeSwsContextPointer( [[nodiscard]] SwsContextPointer MakeSwsContextPointer(
not_null<AVFrame*> frame, not_null<AVFrame*> frame,
QSize resize, QSize resize,
SwsContextPointer *existing = nullptr); SwsContextPointer *existing = nullptr);
@ -179,7 +181,8 @@ void LogError(QLatin1String method, AvErrorWrap error);
[[nodiscard]] bool GoodStorageForFrame(const QImage &storage, QSize size); [[nodiscard]] bool GoodStorageForFrame(const QImage &storage, QSize size);
[[nodiscard]] QImage CreateFrameStorage(QSize size); [[nodiscard]] QImage CreateFrameStorage(QSize size);
[[nodiscard]] QImage ConvertFrame( [[nodiscard]] QImage ConvertFrame(
Stream& stream, Stream &stream,
AVFrame *frame,
QSize resize, QSize resize,
QImage storage); QImage storage);
[[nodiscard]] QImage PrepareByRequest( [[nodiscard]] QImage PrepareByRequest(

View File

@ -85,9 +85,6 @@ private:
bool _queued = false; bool _queued = false;
base::ConcurrentTimer _readFramesTimer; base::ConcurrentTimer _readFramesTimer;
// For initial frame skipping for an exact seek.
FramePointer _initialSkippingFrame;
}; };
VideoTrackObject::VideoTrackObject( VideoTrackObject::VideoTrackObject(
@ -190,6 +187,7 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
_error(); _error();
return false; return false;
} }
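// Hand the freshly decoded frame over to the presentation slot, keeping the previous one in the stream for reuse.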
std::swap(frame->decoded, _stream.frame);
frame->position = position; frame->position = position;
frame->displayed = kTimeUnknown; frame->displayed = kTimeUnknown;
return true; return true;
@ -204,6 +202,7 @@ void VideoTrackObject::presentFrameIfNeeded() {
frame->request = _request; frame->request = _request;
frame->original = ConvertFrame( frame->original = ConvertFrame(
_stream, _stream,
frame->decoded.get(),
frame->request.resize, frame->request.resize,
std::move(frame->original)); std::move(frame->original));
if (frame->original.isNull()) { if (frame->original.isNull()) {
@ -294,11 +293,7 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
auto frame = QImage(); auto frame = QImage();
if (const auto error = ReadNextFrame(_stream)) { if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) { if (error.code() == AVERROR_EOF) {
if (!_initialSkippingFrame) {
return false;
}
// Return the last valid frame if we seek too far. // Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
return processFirstFrame(); return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) { } else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
return false; return false;
@ -311,10 +306,6 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
} else if (_syncTimePoint.trackTime < _options.position) { } else if (_syncTimePoint.trackTime < _options.position) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames. // Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position. // Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = MakeFramePointer();
}
return true; return true;
} else { } else {
return processFirstFrame(); return processFirstFrame();
@ -322,7 +313,11 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
} }
bool VideoTrackObject::processFirstFrame() { bool VideoTrackObject::processFirstFrame() {
auto frame = ConvertFrame(_stream, QSize(), QImage()); auto frame = ConvertFrame(
_stream,
_stream.frame.get(),
QSize(),
QImage());
if (frame.isNull()) { if (frame.isNull()) {
return false; return false;
} }
@ -387,9 +382,9 @@ TimePoint VideoTrackObject::trackTime() const {
} }
Assert(_resumedTime != kTimeUnknown); Assert(_resumedTime != kTimeUnknown);
if (_options.syncVideoByAudio && _audioId.playId()) { if (_options.syncVideoByAudio && _audioId.externalPlayId()) {
const auto mixer = Media::Player::mixer(); const auto mixer = Media::Player::mixer();
const auto point = mixer->getVideoSyncTimePoint(_audioId); const auto point = mixer->getExternalSyncTimePoint(_audioId);
if (point && point.worldTime > _resumedTime) { if (point && point.worldTime > _resumedTime) {
_syncTimePoint = point; _syncTimePoint = point;
} }
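
The readFrame() change above hands the freshly decoded frame to the presented Frame slot by swapping pointers, so the slot's previous (already displayed) allocation goes back to the decoder instead of being freed and re-allocated. A reduced sketch of that hand-off with standard-library stand-ins (DecodedFrame and Slot are illustrative types, not tdesktop's):

#include <memory>
#include <utility>

struct DecodedFrame { int payload = 0; }; // stand-in for AVFrame
using FramePtr = std::unique_ptr<DecodedFrame>;

struct Slot {
	FramePtr decoded = std::make_unique<DecodedFrame>(); // pre-allocated per slot
};

// scratch holds the freshly decoded frame; the slot still holds a stale one.
// Swapping gives the new frame to the slot and recycles the old allocation.
void handOff(Slot &slot, FramePtr &scratch) {
	std::swap(slot.decoded, scratch);
}
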

View File

@@ -57,6 +57,7 @@ private:
friend class VideoTrackObject;
struct Frame {
+FramePointer decoded = MakeFramePointer();
QImage original;
crl::time position = kTimeUnknown;
crl::time displayed = kTimeUnknown;

View File

@@ -45,7 +45,6 @@ namespace Media {
namespace View {
namespace {
-constexpr auto kMsFrequency = 1000; // 1000 ms per second.
constexpr auto kPreloadCount = 4;
// Preload X message ids before and after current.
@@ -321,30 +320,6 @@ QImage OverlayWidget::videoFrame() const {
: _streamed->info.video.cover;
}
-crl::time OverlayWidget::streamedPosition() const {
-Expects(_streamed != nullptr);
-const auto result = std::max(
-_streamed->info.audio.state.position,
-_streamed->info.video.state.position);
-return (result != kTimeUnknown) ? result : crl::time(0);
-}
-crl::time OverlayWidget::streamedDuration() const {
-Expects(_streamed != nullptr);
-const auto result = std::max(
-_streamed->info.audio.state.duration,
-_streamed->info.video.state.duration);
-if (result != kTimeUnknown) {
-return result;
-}
-const auto duration = _doc->song()
-? _doc->song()->duration
-: _doc->duration();
-return (duration > 0) ? duration * crl::time(1000) : kTimeUnknown;
-}
bool OverlayWidget::documentContentShown() const {
return _doc && (!_current.isNull() || videoShown());
}
@@ -1892,7 +1867,7 @@ void OverlayWidget::initStreaming() {
handleStreamingUpdate(std::move(update));
}, [=](Streaming::Error &&error) {
handleStreamingError(std::move(error));
-}, _streamed->controls.lifetime());
+}, _streamed->player.lifetime());
restartAtSeekPosition(0);
}
@@ -1938,6 +1913,7 @@ void OverlayWidget::handleStreamingUpdate(Streaming::Update &&update) {
}, [&](UpdateVideo &update) {
_streamed->info.video.state.position = update.position;
this->update(contentRect());
+Core::App().updateNonIdle();
updatePlaybackState();
}, [&](PreloadedAudio &update) {
_streamed->info.audio.state.receivedTill = update.till;
@@ -2134,15 +2110,17 @@ void OverlayWidget::playbackToggleFullScreen() {
void OverlayWidget::updatePlaybackState() {
Expects(_streamed != nullptr);
-auto state = Player::TrackState();
-state.state = _streamed->player.finished()
-? Player::State::StoppedAtEnd
-: _streamed->player.paused()
-? Player::State::Paused
-: Player::State::Playing;
-state.position = streamedPosition();
-state.length = streamedDuration();
-state.frequency = kMsFrequency;
+auto state = _streamed->player.prepareLegacyState();
+if (state.length == kTimeUnknown) {
+const auto duration = _doc->song()
+? _doc->song()->duration
+: _doc->duration();
+if (duration > 0) {
+state.length = std::max(
+duration * crl::time(1000),
+state.position);
+}
+}
if (state.position != kTimeUnknown && state.length != kTimeUnknown) {
_streamed->controls.updatePlayback(state);
}
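
updatePlaybackState() now starts from the player's own prepareLegacyState() and only patches a missing length from the duration stored on the document. The fallback rule, reduced to plain integers purely as an illustration (resolveLength and the sentinel are stand-in names, not tdesktop's):

#include <algorithm>
#include <cstdint>
#include <limits>

// Stand-in for kTimeUnknown: a sentinel that never occurs as a real timestamp.
constexpr int64_t timeUnknown = std::numeric_limits<int64_t>::min();

// If the player cannot report a length yet, fall back to the document's
// stored duration (seconds), never reporting less than the current position.
int64_t resolveLength(int64_t playerLength, int64_t position, int documentSeconds) {
	if (playerLength != timeUnknown) {
		return playerLength;
	} else if (documentSeconds > 0) {
		return std::max(int64_t(documentSeconds) * 1000, position);
	}
	return timeUnknown;
}
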

View File

@@ -282,8 +282,6 @@ private:
[[nodiscard]] QSize videoSize() const;
[[nodiscard]] bool videoIsGifv() const;
[[nodiscard]] QImage videoFrame() const;
-[[nodiscard]] crl::time streamedPosition() const;
-[[nodiscard]] crl::time streamedDuration() const;
[[nodiscard]] bool documentContentShown() const;
[[nodiscard]] bool documentBubbleShown() const;
void clearStreaming();

View File

@@ -849,15 +849,18 @@ bool Voice::updateStatusText() {
statusSize = FileStatusSizeFailed;
} else if (_data->loaded()) {
statusSize = FileStatusSizeLoaded;
-auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
-if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.playId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
-statusSize = -1 - (state.position / state.frequency);
-realDuration = (state.length / state.frequency);
-showPause = Media::Player::ShowPauseIcon(state.state);
-}
} else {
statusSize = FileStatusSizeReady;
}
+const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Voice);
+if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.externalPlayId())
+&& !Media::Player::IsStoppedOrStopping(state.state)) {
+statusSize = -1 - (state.position / state.frequency);
+realDuration = (state.length / state.frequency);
+showPause = Media::Player::ShowPauseIcon(state.state);
+}
if (statusSize != _status.size()) {
_status.update(statusSize, _data->size, duration(), realDuration);
}
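
Voice items now query the player instance instead of the mixer, and the id comparison carries the new external play id. A reduced model of that check, with TrackId and showsPlayback as illustrative stand-ins for AudioMsgId and the code above:

#include <cstdint>
#include <tuple>

// Stand-in for AudioMsgId: a playing-track id now also carries an external
// play id, so two streaming sessions of the same document are distinct.
struct TrackId {
	uint64_t document = 0;
	uint64_t message = 0;
	uint64_t externalPlayId = 0;

	friend bool operator==(const TrackId &a, const TrackId &b) {
		return std::tie(a.document, a.message, a.externalPlayId)
			== std::tie(b.document, b.message, b.externalPlayId);
	}
};

// The list item adopts the player's externalPlayId before comparing, so the
// match is effectively decided by document + message, as in the hunk above.
bool showsPlayback(const TrackId &playing, const TrackId &item, bool stoppedOrStopping) {
	const auto adjusted = TrackId{ item.document, item.message, playing.externalPlayId };
	return (playing == adjusted) && !stoppedOrStopping;
}
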
@@ -943,7 +946,7 @@ void Document::paint(Painter &p, const QRect &clip, TextSelection selection, con
if (selected) {
p.setBrush(st::msgFileInBgSelected);
} else {
-auto over = ClickHandler::showAsActive(loaded ? _openl : (_data->loading() ? _cancell : _openl));
+auto over = ClickHandler::showAsActive((_data->loading() || _data->uploading()) ? _cancell : _data->canBePlayed() ? _openl : _openl);
p.setBrush(anim::brush(_st.songIconBg, _st.songOverBg, _a_iconOver.current(context->ms, over ? 1. : 0.)));
}
@@ -961,10 +964,10 @@ void Document::paint(Painter &p, const QRect &clip, TextSelection selection, con
auto icon = [&] {
if (showPause) {
return &(selected ? _st.songPauseSelected : _st.songPause);
-} else if (loaded) {
-return &(selected ? _st.songPlaySelected : _st.songPlay);
-} else if (_data->loading()) {
+} else if (_data->loading() || _data->uploading()) {
return &(selected ? _st.songCancelSelected : _st.songCancel);
+} else if (_data->canBePlayed()) {
+return &(selected ? _st.songPlaySelected : _st.songPlay);
}
return &(selected ? _st.songDownloadSelected : _st.songDownload);
}();
@@ -1100,10 +1103,10 @@ TextState Document::getState(
_st.songThumbSize,
_width);
if (inner.contains(point)) {
-const auto link = loaded
-? _openl
-: (_data->loading() || _data->uploading())
+const auto link = (_data->loading() || _data->uploading())
? _cancell
+: _data->canBePlayed()
+? _openl
: _openl;
return { parent(), link };
}
@@ -1217,23 +1220,23 @@ bool Document::updateStatusText() {
} else if (_data->loading()) {
statusSize = _data->loadOffset();
} else if (_data->loaded()) {
-if (_data->isSong()) {
-statusSize = FileStatusSizeLoaded;
-auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
-if (state.id == AudioMsgId(_data, parent()->fullId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
-statusSize = -1 - (state.position / state.frequency);
-realDuration = (state.length / state.frequency);
-showPause = Media::Player::ShowPauseIcon(state.state);
-}
-if (!showPause && (state.id == AudioMsgId(_data, parent()->fullId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
-showPause = true;
-}
-} else {
-statusSize = FileStatusSizeLoaded;
-}
+statusSize = FileStatusSizeLoaded;
} else {
statusSize = FileStatusSizeReady;
}
+if (_data->isSong()) {
+const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Song);
+if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.externalPlayId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
+statusSize = -1 - (state.position / state.frequency);
+realDuration = (state.length / state.frequency);
+showPause = Media::Player::ShowPauseIcon(state.state);
+}
+if (!showPause && (state.id == AudioMsgId(_data, parent()->fullId(), state.id.externalPlayId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
+showPause = true;
+}
+}
if (statusSize != _status.size()) {
_status.update(statusSize, _data->size, _data->isSong() ? _data->song()->duration : -1, realDuration);
}
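
Taken together, the paint/getState/status changes in this file settle on one priority for songs: cancel while transferring, play whenever the track can be played (even before it is fully downloaded), download otherwise. A compact restatement of that order, with SongIcon and chooseIcon as illustrative stand-ins:

enum class SongIcon { Pause, Cancel, Play, Download };

// Same priority as the icon lambda above: an active download/upload always
// offers "cancel", anything playable offers "play", the rest gets "download".
SongIcon chooseIcon(bool showPause, bool loadingOrUploading, bool canBePlayed) {
	if (showPause) {
		return SongIcon::Pause;
	} else if (loadingOrUploading) {
		return SongIcon::Cancel;
	} else if (canBePlayed) {
		return SongIcon::Play;
	}
	return SongIcon::Download;
}
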

View File

@@ -440,8 +440,6 @@
<(src_loc)/media/clip/media_clip_reader.h
<(src_loc)/media/player/media_player_button.cpp
<(src_loc)/media/player/media_player_button.h
-<(src_loc)/media/player/media_player_cover.cpp
-<(src_loc)/media/player/media_player_cover.h
<(src_loc)/media/player/media_player_float.cpp
<(src_loc)/media/player/media_player_float.h
<(src_loc)/media/player/media_player_instance.cpp