Play streaming audio in player.

This commit is contained in:
John Preston 2019-03-01 01:03:25 +04:00
parent f1e0cd6c1d
commit fde8dd9607
44 changed files with 998 additions and 1445 deletions

View File

@ -209,7 +209,9 @@ void Application::showPhoto(
}
void Application::showDocument(not_null<DocumentData*> document, HistoryItem *item) {
if (cUseExternalVideoPlayer() && document->isVideoFile()) {
if (cUseExternalVideoPlayer()
&& document->isVideoFile()
&& document->loaded()) {
QDesktopServices::openUrl(QUrl("file:///" + document->location(false).fname));
} else {
_mediaView->showDocument(document, item);
@ -738,6 +740,11 @@ void Application::authSessionDestroy() {
if (_authSession) {
unlockTerms();
_mtproto->clearGlobalHandlers();
// Must be called before Auth().data() is destroyed,
// because streaming media holds pointers to it.
Media::Player::instance()->handleLogout();
_authSession = nullptr;
authSessionChanged().notify(true);
Notify::unreadCounterUpdated();

View File

@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/media_active_cache.h"
#include "core/mime_type.h"
#include "media/audio/media_audio.h"
#include "media/player/media_player_instance.h"
#include "storage/localstorage.h"
#include "platform/platform_specific.h"
#include "history/history.h"
@ -27,9 +28,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/image/image_source.h"
#include "mainwindow.h"
#include "core/application.h"
// #TODO streaming ui
#include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_loader_mtproto.h"
namespace {
@ -294,52 +292,26 @@ void DocumentOpenClickHandler::Open(
ActionOnLoad action) {
if (!data->date) return;
auto msgId = context ? context->fullId() : FullMsgId();
bool playVoice = data->isVoiceMessage();
bool playMusic = data->isAudioFile();
bool playVideo = data->isVideoFile();
bool playAnimation = data->isAnimation();
auto &location = data->location(true);
if (data->isTheme()) {
if (!location.isEmpty() && location.accessEnable()) {
Core::App().showDocument(data, context);
location.accessDisable();
return;
}
}
if (data->canBePlayed()) {
const auto msgId = context ? context->fullId() : FullMsgId();
const auto playVoice = data->isVoiceMessage();
const auto playAnimation = data->isAnimation();
const auto &location = data->location(true);
if (data->isTheme() && !location.isEmpty() && location.accessEnable()) {
Core::App().showDocument(data, context);
location.accessDisable();
return;
} else if (data->canBePlayed()) {
if (data->isAudioFile()) {
Media::Player::instance()->playPause({ data, msgId });
} else {
Core::App().showDocument(data, context);
}
return;
}
if (!location.isEmpty() || (!data->data().isEmpty() && (playVoice || playMusic || playVideo || playAnimation))) {
if (!location.isEmpty() || (!data->data().isEmpty() && (playVoice || playAnimation))) {
using State = Media::Player::State;
if (playVoice) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else {
auto audio = AudioMsgId(data, msgId);
Media::Player::mixer()->play(audio);
Media::Player::Updated().notify(audio);
data->owner().markMediaRead(data);
}
} else if (playMusic) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else {
auto song = AudioMsgId(data, msgId);
Media::Player::mixer()->play(song);
Media::Player::Updated().notify(song);
}
Media::Player::instance()->playPause({ data, msgId });
} else if (data->size < App::kImageSizeLimit) {
if (!data->data().isEmpty() && playAnimation) {
if (action == ActionOnLoadPlayInline && context) {
@ -750,35 +722,8 @@ void DocumentData::performActionOnLoad() {
}
}
using State = Media::Player::State;
if (playVoice) {
if (loaded()) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else if (Media::Player::IsStopped(state.state)) {
Media::Player::mixer()->play(AudioMsgId(this, _actionOnLoadMsgId));
_owner->markMediaRead(this);
}
}
} else if (playMusic) {
if (loaded()) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStoppedOrStopping(state.state)) {
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else if (Media::Player::IsStopped(state.state)) {
auto song = AudioMsgId(this, _actionOnLoadMsgId);
Media::Player::mixer()->play(song);
Media::Player::Updated().notify(song);
}
}
if (playVoice || playMusic) {
DocumentOpenClickHandler::Open({}, this, item, ActionOnLoadNone);
} else if (playAnimation) {
if (loaded()) {
if (_actionOnLoad == ActionOnLoadPlayInline && item) {

View File

@ -173,9 +173,14 @@ bool ReplyPreview::empty() const {
} // namespace Data
uint32 AudioMsgId::CreateExternalPlayId() {
static auto Result = uint32(0);
return ++Result ? Result : ++Result;
}
AudioMsgId AudioMsgId::ForVideo() {
auto result = AudioMsgId();
result._playId = rand_value<uint32>();
result._externalPlayId = CreateExternalPlayId();
result._type = Type::Video;
return result;
}

View File

@ -363,12 +363,14 @@ public:
AudioMsgId(
DocumentData *audio,
const FullMsgId &msgId,
uint32 playId = 0)
uint32 externalPlayId = 0)
: _audio(audio)
, _contextId(msgId)
, _playId(playId) {
, _externalPlayId(externalPlayId) {
setTypeFromAudio();
}
[[nodiscard]] static uint32 CreateExternalPlayId();
[[nodiscard]] static AudioMsgId ForVideo();
Type type() const {
@ -380,8 +382,8 @@ public:
FullMsgId contextId() const {
return _contextId;
}
uint32 playId() const {
return _playId;
uint32 externalPlayId() const {
return _externalPlayId;
}
explicit operator bool() const {
@ -394,7 +396,7 @@ private:
DocumentData *_audio = nullptr;
Type _type = Type::Unknown;
FullMsgId _contextId;
uint32 _playId = 0;
uint32 _externalPlayId = 0;
};
@ -408,13 +410,13 @@ inline bool operator<(const AudioMsgId &a, const AudioMsgId &b) {
} else if (b.contextId() < a.contextId()) {
return false;
}
return (a.playId() < b.playId());
return (a.externalPlayId() < b.externalPlayId());
}
inline bool operator==(const AudioMsgId &a, const AudioMsgId &b) {
return (a.audio() == b.audio())
&& (a.contextId() == b.contextId())
&& (a.playId() == b.playId());
&& (a.externalPlayId() == b.externalPlayId());
}
inline bool operator!=(const AudioMsgId &a, const AudioMsgId &b) {

View File

@ -855,5 +855,5 @@ void HistoryDocumentVoice::startSeeking() {
void HistoryDocumentVoice::stopSeeking() {
_seeking = false;
Media::Player::instance()->stopSeeking(AudioMsgId::Type::Voice);
Media::Player::instance()->cancelSeeking(AudioMsgId::Type::Voice);
}

View File

@ -252,12 +252,8 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
p.setPen(Qt::NoPen);
if (selected) {
p.setBrush(st::msgDateImgBgSelected);
} else if (isThumbAnimation(ms)) {
auto over = _animation->a_thumbOver.current();
p.setBrush(anim::brush(st::msgDateImgBg, st::msgDateImgBgOver, over));
} else {
auto over = ClickHandler::showAsActive(_data->loading() ? _cancell : _savel);
p.setBrush(over ? st::msgDateImgBgOver : st::msgDateImgBg);
p.setBrush(st::msgDateImgBg);
}
p.setOpacity(radialOpacity * p.opacity());
@ -303,12 +299,8 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
p.setPen(Qt::NoPen);
if (selected) {
p.setBrush(outbg ? st::msgFileOutBgSelected : st::msgFileInBgSelected);
} else if (isThumbAnimation(ms)) {
auto over = _animation->a_thumbOver.current();
p.setBrush(anim::brush(outbg ? st::msgFileOutBg : st::msgFileInBg, outbg ? st::msgFileOutBgOver : st::msgFileInBgOver, over));
} else {
auto over = ClickHandler::showAsActive(_data->loading() ? _cancell : _savel);
p.setBrush(outbg ? (over ? st::msgFileOutBgOver : st::msgFileOutBg) : (over ? st::msgFileInBgOver : st::msgFileInBg));
p.setBrush(outbg ? st::msgFileOutBg : st::msgFileInBg);
}
{
@ -322,13 +314,13 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
_animation->radial.draw(p, rinner, st::msgFileRadialLine, fg);
}
auto icon = ([showPause, radial, this, loaded, outbg, selected] {
auto icon = [&] {
if (showPause) {
return &(outbg ? (selected ? st::historyFileOutPauseSelected : st::historyFileOutPause) : (selected ? st::historyFileInPauseSelected : st::historyFileInPause));
} else if (radial || _data->loading()) {
return &(outbg ? (selected ? st::historyFileOutCancelSelected : st::historyFileOutCancel) : (selected ? st::historyFileInCancelSelected : st::historyFileInCancel));
} else if (loaded) {
if (_data->isAudioFile() || _data->isVoiceMessage()) {
} else if (loaded || _data->canBePlayed()) {
if (_data->canBePlayed()) {
return &(outbg ? (selected ? st::historyFileOutPlaySelected : st::historyFileOutPlay) : (selected ? st::historyFileInPlaySelected : st::historyFileInPlay));
} else if (_data->isImage()) {
return &(outbg ? (selected ? st::historyFileOutImageSelected : st::historyFileOutImage) : (selected ? st::historyFileInImageSelected : st::historyFileInImage));
@ -336,7 +328,7 @@ void HistoryDocument::draw(Painter &p, const QRect &r, TextSelection selection,
return &(outbg ? (selected ? st::historyFileOutDocumentSelected : st::historyFileOutDocument) : (selected ? st::historyFileInDocumentSelected : st::historyFileInDocument));
}
return &(outbg ? (selected ? st::historyFileOutDownloadSelected : st::historyFileOutDownload) : (selected ? st::historyFileInDownloadSelected : st::historyFileInDownload));
})();
}();
icon->paintInCenter(p, inner);
}
auto namewidth = width() - nameleft - nameright;
@ -465,20 +457,12 @@ TextState HistoryDocument::textState(QPoint point, StateRequest request) const {
auto nameleft = 0, nametop = 0, nameright = 0, statustop = 0, linktop = 0, bottom = 0;
auto topMinus = isBubbleTop() ? 0 : st::msgFileTopMinus;
if (auto thumbed = Get<HistoryDocumentThumbed>()) {
if (const auto thumbed = Get<HistoryDocumentThumbed>()) {
nameleft = st::msgFileThumbPadding.left() + st::msgFileThumbSize + st::msgFileThumbPadding.right();
nameright = st::msgFileThumbPadding.left();
nametop = st::msgFileThumbNameTop - topMinus;
linktop = st::msgFileThumbLinkTop - topMinus;
bottom = st::msgFileThumbPadding.top() + st::msgFileThumbSize + st::msgFileThumbPadding.bottom() - topMinus;
QRect rthumb(rtlrect(st::msgFileThumbPadding.left(), st::msgFileThumbPadding.top() - topMinus, st::msgFileThumbSize, st::msgFileThumbSize, width()));
if ((_data->loading() || _data->uploading() || !loaded) && rthumb.contains(point)) {
result.link = (_data->loading() || _data->uploading()) ? _cancell : _savel;
return result;
}
if (_data->status != FileUploadFailed) {
if (rtlrect(nameleft, linktop, thumbed->_linkw, st::semiboldFont->height, width()).contains(point)) {
result.link = (_data->loading() || _data->uploading())
@ -487,25 +471,14 @@ TextState HistoryDocument::textState(QPoint point, StateRequest request) const {
return result;
}
}
} else {
nameleft = st::msgFilePadding.left() + st::msgFileSize + st::msgFilePadding.right();
nameright = st::msgFilePadding.left();
nametop = st::msgFileNameTop - topMinus;
bottom = st::msgFilePadding.top() + st::msgFileSize + st::msgFilePadding.bottom() - topMinus;
QRect inner(rtlrect(st::msgFilePadding.left(), st::msgFilePadding.top() - topMinus, st::msgFileSize, st::msgFileSize, width()));
if ((_data->loading() || _data->uploading() || !loaded) && inner.contains(point)) {
result.link = (_data->loading() || _data->uploading()) ? _cancell : _savel;
return result;
}
}
if (auto voice = Get<HistoryDocumentVoice>()) {
if (const auto voice = Get<HistoryDocumentVoice>()) {
auto namewidth = width() - nameleft - nameright;
auto waveformbottom = st::msgFilePadding.top() - topMinus + st::msgWaveformMax + st::msgWaveformMin;
if (QRect(nameleft, nametop, namewidth, waveformbottom - nametop).contains(point)) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_data, _parent->data()->fullId())
const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
if (!voice->seeking()) {
voice->setSeekingStart((point.x() - nameleft) / float64(namewidth));
@ -532,7 +505,13 @@ TextState HistoryDocument::textState(QPoint point, StateRequest request) const {
}
}
if (QRect(0, 0, width(), painth).contains(point) && !_data->loading() && !_data->uploading() && !_data->isNull()) {
result.link = _openl;
if (_data->loading() || _data->uploading()) {
result.link = _cancell;
} else if (loaded || _data->canBePlayed()) {
result.link = _openl;
} else {
result.link = _savel;
}
return result;
}
return result;
@ -622,53 +601,55 @@ bool HistoryDocument::updateStatusText() const {
statusSize = _data->loadOffset();
} else if (_data->loaded()) {
statusSize = FileStatusSizeLoaded;
if (_data->isVoiceMessage()) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_data, _parent->data()->fullId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
if (auto voice = Get<HistoryDocumentVoice>()) {
bool was = (voice->_playback != nullptr);
voice->ensurePlayback(this);
if (!was || state.position != voice->_playback->_position) {
auto prg = state.length ? snap(float64(state.position) / state.length, 0., 1.) : 0.;
if (voice->_playback->_position < state.position) {
voice->_playback->a_progress.start(prg);
} else {
voice->_playback->a_progress = anim::value(0., prg);
}
voice->_playback->_position = state.position;
voice->_playback->_a_progress.start();
}
voice->_lastDurationMs = static_cast<int>((state.length * 1000LL) / state.frequency); // Bad :(
}
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
} else {
if (auto voice = Get<HistoryDocumentVoice>()) {
voice->checkPlaybackFinished();
}
}
if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId()))) {
showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Voice);
}
} else if (_data->isAudioFile()) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(_data, _parent->data()->fullId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
} else {
}
if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId()))) {
showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Song);
}
}
} else {
statusSize = FileStatusSizeReady;
}
if (_data->isVoiceMessage()) {
const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
if (auto voice = Get<HistoryDocumentVoice>()) {
bool was = (voice->_playback != nullptr);
voice->ensurePlayback(this);
if (!was || state.position != voice->_playback->_position) {
auto prg = state.length ? snap(float64(state.position) / state.length, 0., 1.) : 0.;
if (voice->_playback->_position < state.position) {
voice->_playback->a_progress.start(prg);
} else {
voice->_playback->a_progress = anim::value(0., prg);
}
voice->_playback->_position = state.position;
voice->_playback->_a_progress.start();
}
voice->_lastDurationMs = static_cast<int>((state.length * 1000LL) / state.frequency); // Bad :(
}
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
} else {
if (auto voice = Get<HistoryDocumentVoice>()) {
voice->checkPlaybackFinished();
}
}
if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId()))) {
showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Voice);
}
} else if (_data->isAudioFile()) {
const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
} else {
}
if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId()))) {
showPause = Media::Player::instance()->isSeeking(AudioMsgId::Type::Song);
}
}
if (statusSize != _statusSize) {
setStatusSize(statusSize, realDuration);
}
@ -708,9 +689,9 @@ void HistoryDocument::clickHandlerPressedChanged(const ClickHandlerPtr &p, bool
if (pressed && p == voice->_seekl && !voice->seeking()) {
voice->startSeeking();
} else if (!pressed && voice->seeking()) {
auto type = AudioMsgId::Type::Voice;
auto state = Media::Player::mixer()->currentState(type);
if (state.id == AudioMsgId(_data, _parent->data()->fullId()) && state.length) {
const auto type = AudioMsgId::Type::Voice;
const auto state = Media::Player::instance()->getState(type);
if (state.id == AudioMsgId(_data, _parent->data()->fullId(), state.id.externalPlayId()) && state.length) {
auto currentProgress = voice->seekingCurrent();
auto currentPosition = state.frequency
? qRound(currentProgress * state.length * 1000. / state.frequency)

View File

@ -61,6 +61,12 @@ protected:
bool dataLoaded() const override;
private:
struct StateFromPlayback {
int statusSize = 0;
bool showPause = false;
int realDuration = 0;
};
QSize countOptimalSize() override;
QSize countCurrentSize(int newWidth) override;

View File

@ -13,6 +13,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio.h"
#include "media/clip/media_clip_reader.h"
#include "media/player/media_player_round_controller.h"
#include "media/player/media_player_instance.h"
#include "media/view/media_view_playback_progress.h"
#include "boxes/confirm_box.h"
#include "history/history_item_components.h"
@ -750,8 +751,8 @@ void HistoryGif::updateStatusText() const {
if (const auto video = activeRoundPlayer()) {
statusSize = -1 - _data->duration();
const auto state = Media::Player::mixer()->currentState(
AudioMsgId::Type::Voice);
const auto type = AudioMsgId::Type::Voice;
const auto state = Media::Player::instance()->getState(type);
if (state.id == video->audioMsgId() && state.length) {
auto position = int64(0);
if (Media::Player::IsStoppedAtEnd(state.state)) {

View File

@ -894,31 +894,24 @@ bool File::updateStatusText() const {
} else if (_document->loading()) {
statusSize = _document->loadOffset();
} else if (_document->loaded()) {
if (_document->isVoiceMessage()) {
statusSize = FileStatusSizeLoaded;
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
} else if (_document->isAudioFile()) {
statusSize = FileStatusSizeLoaded;
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (!showPause && (state.id == AudioMsgId(_document, FullMsgId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
} else {
statusSize = FileStatusSizeLoaded;
}
statusSize = FileStatusSizeLoaded;
} else {
statusSize = FileStatusSizeReady;
}
if (_document->isVoiceMessage() || _document->isAudioFile()) {
const auto type = _document->isVoiceMessage() ? AudioMsgId::Type::Voice : AudioMsgId::Type::Song;
const auto state = Media::Player::instance()->getState(type);
if (state.id == AudioMsgId(_document, FullMsgId(), state.id.externalPlayId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (!showPause && (state.id == AudioMsgId(_document, FullMsgId(), state.id.externalPlayId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
}
if (statusSize != _statusSize) {
int32 duration = _document->isSong()
? _document->song()->duration

View File

@ -366,11 +366,7 @@ MainWidget::MainWidget(
, _sideShadow(this)
, _dialogs(this, _controller)
, _history(this, _controller)
, _playerPlaylist(
this,
_controller,
Media::Player::Panel::Layout::OnlyPlaylist)
, _playerPanel(this, _controller, Media::Player::Panel::Layout::Full)
, _playerPlaylist(this, _controller)
, _noUpdatesTimer([=] { sendPing(); })
, _byPtsTimer([=] { getDifferenceByPts(); })
, _bySeqTimer([=] { getDifference(); })
@ -447,15 +443,6 @@ MainWidget::MainWidget(
}
});
_playerPanel->setPinCallback([this] { switchToFixedPlayer(); });
_playerPanel->setCloseCallback([this] { closeBothPlayers(); });
subscribe(Media::Player::instance()->titleButtonOver(), [this](bool over) {
if (over) {
_playerPanel->showFromOther();
} else {
_playerPanel->hideFromOther();
}
});
subscribe(Media::Player::instance()->playerWidgetOver(), [this](bool over) {
if (over) {
if (_playerPlaylist->isHidden()) {
@ -471,7 +458,7 @@ MainWidget::MainWidget(
});
subscribe(Media::Player::instance()->tracksFinishedNotifier(), [this](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Voice) {
auto songState = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
const auto songState = Media::Player::instance()->getState(AudioMsgId::Type::Song);
if (!songState.id || IsStoppedOrStopping(songState.state)) {
closeBothPlayers();
}
@ -1189,7 +1176,7 @@ void MainWidget::messagesAffected(
void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
using State = Media::Player::State;
const auto document = audioId.audio();
auto state = Media::Player::mixer()->currentState(audioId.type());
auto state = Media::Player::instance()->getState(audioId.type());
if (state.id == audioId && state.state == State::StoppedAtStart) {
state.state = State::Stopped;
Media::Player::mixer()->clearStoppedAtStart(audioId);
@ -1220,47 +1207,12 @@ void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
}
}
void MainWidget::switchToPanelPlayer() {
if (_playerUsingPanel) return;
_playerUsingPanel = true;
_player->hide(anim::type::normal);
_playerVolume.destroyDelayed();
_playerPlaylist->hideIgnoringEnterEvents();
Media::Player::instance()->usePanelPlayer().notify(true, true);
}
void MainWidget::switchToFixedPlayer() {
if (!_playerUsingPanel) return;
_playerUsingPanel = false;
if (!_player) {
createPlayer();
} else {
_player->show(anim::type::normal);
if (!_playerVolume) {
_playerVolume.create(this);
_player->entity()->volumeWidgetCreated(_playerVolume);
updateMediaPlayerPosition();
}
}
Media::Player::instance()->usePanelPlayer().notify(false, true);
_playerPanel->hideIgnoringEnterEvents();
}
void MainWidget::closeBothPlayers() {
if (_playerUsingPanel) {
_playerUsingPanel = false;
_player.destroyDelayed();
} else if (_player) {
if (_player) {
_player->hide(anim::type::normal);
}
_playerVolume.destroyDelayed();
Media::Player::instance()->usePanelPlayer().notify(false, true);
_playerPanel->hideIgnoringEnterEvents();
_playerPlaylist->hideIgnoringEnterEvents();
Media::Player::instance()->stop(AudioMsgId::Type::Voice);
Media::Player::instance()->stop(AudioMsgId::Type::Song);
@ -1269,9 +1221,6 @@ void MainWidget::closeBothPlayers() {
}
void MainWidget::createPlayer() {
if (_playerUsingPanel) {
return;
}
if (!_player) {
_player.create(this, object_ptr<Media::Player::Widget>(this));
rpl::merge(
@ -1314,7 +1263,7 @@ void MainWidget::playerHeightUpdated() {
updateControlsGeometry();
}
if (!_playerHeight && _player->isHidden()) {
auto state = Media::Player::mixer()->currentState(Media::Player::instance()->getActiveType());
const auto state = Media::Player::instance()->getState(Media::Player::instance()->getActiveType());
if (!state.id || Media::Player::IsStoppedOrStopping(state.state)) {
_playerVolume.destroyDelayed();
_player.destroyDelayed();
@ -2042,10 +1991,6 @@ Window::SectionSlideParams MainWidget::prepareShowAnimation(
if (playerVolumeVisible) {
_playerVolume->hide();
}
auto playerPanelVisible = !_playerPanel->isHidden();
if (playerPanelVisible) {
_playerPanel->hide();
}
auto playerPlaylistVisible = !_playerPlaylist->isHidden();
if (playerPlaylistVisible) {
_playerPlaylist->hide();
@ -2073,9 +2018,6 @@ Window::SectionSlideParams MainWidget::prepareShowAnimation(
if (playerVolumeVisible) {
_playerVolume->show();
}
if (playerPanelVisible) {
_playerPanel->show();
}
if (playerPlaylistVisible) {
_playerPlaylist->show();
}
@ -2330,7 +2272,6 @@ void MainWidget::orderWidgets() {
}
_connecting->raise();
_playerPlaylist->raise();
_playerPanel->raise();
floatPlayerRaiseAll();
if (_hider) _hider->raise();
}
@ -2352,10 +2293,6 @@ QPixmap MainWidget::grabForShowAnimation(const Window::SectionSlideParams &param
if (playerVolumeVisible) {
_playerVolume->hide();
}
auto playerPanelVisible = !_playerPanel->isHidden();
if (playerPanelVisible) {
_playerPanel->hide();
}
auto playerPlaylistVisible = !_playerPlaylist->isHidden();
if (playerPlaylistVisible) {
_playerPlaylist->hide();
@ -2386,9 +2323,6 @@ QPixmap MainWidget::grabForShowAnimation(const Window::SectionSlideParams &param
if (playerVolumeVisible) {
_playerVolume->show();
}
if (playerPanelVisible) {
_playerPanel->show();
}
if (playerPlaylistVisible) {
_playerPlaylist->show();
}
@ -2890,7 +2824,6 @@ void MainWidget::updateThirdColumnToCurrentChat(
}
void MainWidget::updateMediaPlayerPosition() {
_playerPanel->moveToRight(0, 0);
if (_player && _playerVolume) {
auto relativePosition = _player->entity()->getPositionForVolumeWidget();
auto playerMargins = _playerVolume->getMargin();
@ -3524,7 +3457,6 @@ void MainWidget::openPeerByName(
bool MainWidget::contentOverlapped(const QRect &globalRect) {
return (_history->contentOverlapped(globalRect)
|| _playerPanel->overlaps(globalRect)
|| _playerPlaylist->overlaps(globalRect)
|| (_playerVolume && _playerVolume->overlaps(globalRect)));
}

View File

@ -365,8 +365,6 @@ private:
void setupConnectingWidget();
void createPlayer();
void switchToPanelPlayer();
void switchToFixedPlayer();
void closeBothPlayers();
void playerHeightUpdated();
@ -501,7 +499,6 @@ private:
= { nullptr };
object_ptr<Media::Player::VolumeWidget> _playerVolume = { nullptr };
object_ptr<Media::Player::Panel> _playerPlaylist;
object_ptr<Media::Player::Panel> _playerPanel;
bool _playerUsingPanel = false;
base::unique_qptr<Window::HistoryHider> _hider;

View File

@ -422,7 +422,8 @@ void Mixer::Track::resetSpeedEffect() {
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::reattach(AudioMsgId::Type type) {
if (isStreamCreated() || !samplesCount[0]) {
if (isStreamCreated()
|| (!samplesCount[0] && !state.id.externalPlayId())) {
return;
}
@ -440,11 +441,13 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
&& (state.state != State::PausedAtEnd)
&& !state.waitingForData) {
alSourcef(stream.source, AL_GAIN, ComputeVolume(type));
LOG(("alSourcePlay: reattach for %1").arg(state.id.externalPlayId()));
alSourcePlay(stream.source);
if (IsPaused(state.state)) {
// We must always start the source if we want the AL_SAMPLE_OFFSET to be applied.
// Otherwise it won't be read by alGetSource and we'll get a corrupt position.
// So in case of a paused source we start it and then immediately pause it.
LOG(("alSourcePause: reattach for %1").arg(state.id.externalPlayId()));
alSourcePause(stream.source);
}
}
@ -475,7 +478,7 @@ void Mixer::Track::clear() {
bufferSamples[i] = QByteArray();
}
setVideoData(nullptr);
setExternalData(nullptr);
lastUpdateWhen = 0;
lastUpdatePosition = 0;
}
@ -553,11 +556,12 @@ int Mixer::Track::getNotQueuedBufferIndex() {
return -1;
}
void Mixer::Track::setVideoData(std::unique_ptr<VideoSoundData> data) {
void Mixer::Track::setExternalData(
std::unique_ptr<ExternalSoundData> data) {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
changeSpeedEffect(data ? data->speed : 1.);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
videoData = std::move(data);
externalData = std::move(data);
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
@ -640,8 +644,8 @@ Mixer::~Mixer() {
}
void Mixer::onUpdated(const AudioMsgId &audio) {
if (audio.playId()) {
videoSoundProgress(audio);
if (audio.externalPlayId()) {
externalSoundProgress(audio);
}
Media::Player::Updated().notify(audio);
}
@ -740,7 +744,9 @@ void Mixer::resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered
positionInBuffered = 0;
}
}
auto fullPosition = track->bufferedPosition + positionInBuffered;
auto fullPosition = track->samplesCount[0]
? (track->bufferedPosition + positionInBuffered)
: track->state.position;
track->state.position = fullPosition;
track->fadeStartPosition = fullPosition;
}
@ -776,9 +782,9 @@ void Mixer::play(const AudioMsgId &audio, crl::time positionMs) {
void Mixer::play(
const AudioMsgId &audio,
std::unique_ptr<VideoSoundData> videoData,
std::unique_ptr<ExternalSoundData> externalData,
crl::time positionMs) {
Expects(!videoData || audio.playId() != 0);
Expects((externalData != nullptr) == (audio.externalPlayId() != 0));
auto type = audio.type();
AudioMsgId stopped;
@ -839,24 +845,33 @@ void Mixer::play(
}
}
current->state.id = audio;
if (current->state.id != audio) {
current->started(); // Clear all previous state.
current->state.id = audio;
}
current->lastUpdateWhen = 0;
current->lastUpdatePosition = 0;
if (videoData) {
current->setVideoData(std::move(videoData));
if (externalData) {
current->setExternalData(std::move(externalData));
} else {
current->setVideoData(nullptr);
current->setExternalData(nullptr);
current->file = audio.audio()->location(true);
current->data = audio.audio()->data();
notLoadedYet = (current->file.isEmpty() && current->data.isEmpty());
}
if (notLoadedYet) {
auto newState = (type == AudioMsgId::Type::Song) ? State::Stopped : State::StoppedAtError;
auto newState = (type == AudioMsgId::Type::Song)
? State::Stopped
: State::StoppedAtError;
setStoppedState(current, newState);
} else {
current->state.position = (positionMs * current->state.frequency)
/ 1000LL;
current->state.state = current->videoData ? State::Paused : fadedStart ? State::Starting : State::Playing;
current->state.state = current->externalData
? State::Paused
: fadedStart
? State::Starting
: State::Playing;
current->loading = true;
emit loaderOnStart(current->state.id, positionMs);
if (type == AudioMsgId::Type::Voice) {
@ -879,63 +894,60 @@ void Mixer::play(
}
}
void Mixer::feedFromVideo(const VideoSoundPart &part) {
_loader->feedFromVideo(part);
void Mixer::feedFromExternal(ExternalSoundPart &&part) {
_loader->feedFromExternal(std::move(part));
}
void Mixer::forceToBufferVideo(const AudioMsgId &audioId) {
_loader->forceToBufferVideo(audioId);
void Mixer::forceToBufferExternal(const AudioMsgId &audioId) {
_loader->forceToBufferExternal(audioId);
}
void Mixer::setSpeedFromVideo(const AudioMsgId &audioId, float64 speed) {
void Mixer::setSpeedFromExternal(const AudioMsgId &audioId, float64 speed) {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
QMutexLocker lock(&AudioMutex);
const auto track = trackForType(AudioMsgId::Type::Video);
const auto track = trackForType(audioId.type());
if (track->state.id == audioId) {
track->changeSpeedEffect(speed);
}
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
Streaming::TimePoint Mixer::getVideoSyncTimePoint(
Streaming::TimePoint Mixer::getExternalSyncTimePoint(
const AudioMsgId &audio) const {
Expects(audio.type() == AudioMsgId::Type::Video);
Expects(audio.playId() != 0);
Expects(audio.externalPlayId() != 0);
auto result = Streaming::TimePoint();
const auto playId = audio.playId();
const auto type = audio.type();
QMutexLocker lock(&AudioMutex);
const auto track = trackForType(AudioMsgId::Type::Video);
if (track->state.id.playId() == playId && track->lastUpdateWhen > 0) {
const auto track = trackForType(type);
if (track && track->state.id == audio && track->lastUpdateWhen > 0) {
result.trackTime = track->lastUpdatePosition;
result.worldTime = track->lastUpdateWhen;
}
return result;
}
crl::time Mixer::getVideoCorrectedTime(const AudioMsgId &audio, crl::time frameMs, crl::time systemMs) {
crl::time Mixer::getExternalCorrectedTime(const AudioMsgId &audio, crl::time frameMs, crl::time systemMs) {
auto result = frameMs;
const auto type = audio.type();
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
auto track = trackForType(type);
const auto track = trackForType(type);
if (track && track->state.id == audio && track->lastUpdateWhen > 0) {
result = static_cast<crl::time>(track->lastUpdatePosition);
if (systemMs > track->lastUpdateWhen) {
result += (systemMs - track->lastUpdateWhen);
}
}
return result;
}
void Mixer::videoSoundProgress(const AudioMsgId &audio) {
auto type = audio.type();
void Mixer::externalSoundProgress(const AudioMsgId &audio) {
const auto type = audio.type();
QMutexLocker lock(&AudioMutex);
auto current = trackForType(type);
const auto current = trackForType(type);
if (current && current->state.length && current->state.frequency) {
if (current->state.id == audio && current->state.state == State::Playing) {
current->lastUpdateWhen = crl::now();
@ -947,7 +959,7 @@ void Mixer::videoSoundProgress(const AudioMsgId &audio) {
bool Mixer::checkCurrentALError(AudioMsgId::Type type) {
if (!Audio::PlaybackErrorHappened()) return true;
auto data = trackForType(type);
const auto data = trackForType(type);
if (!data) {
setStoppedState(data, State::StoppedAtError);
onError(data->state.id);
@ -1044,6 +1056,7 @@ void Mixer::resume(const AudioMsgId &audio, bool fast) {
alSourcei(track->stream.source, AL_SAMPLE_OFFSET, qMax(track->state.position - track->bufferedPosition, 0LL));
if (!checkCurrentALError(type)) return;
}
LOG(("alSourcePlay: resume for: %1").arg(track->state.id.externalPlayId()));
alSourcePlay(track->stream.source);
if (!checkCurrentALError(type)) return;
}
@ -1134,12 +1147,13 @@ void Mixer::stop(const AudioMsgId &audio) {
return;
}
current = track->state.id;
current = audio;
fadedStop(type);
if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
} else if (type == AudioMsgId::Type::Video) {
track->clear();
emit loaderOnCancel(audio);
}
}
if (current) emit updated(current);
@ -1153,11 +1167,13 @@ void Mixer::stop(const AudioMsgId &audio, State state) {
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
auto track = trackForType(type);
if (!track || track->state.id != audio || IsStopped(track->state.state)) {
if (!track
|| track->state.id != audio
|| IsStopped(track->state.state)) {
return;
}
current = track->state.id;
current = audio;
setStoppedState(track, state);
if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
@ -1247,6 +1263,7 @@ void Mixer::setStoppedState(Track *current, State state) {
alSourceStop(current->stream.source);
alSourcef(current->stream.source, AL_GAIN, 1);
}
emit loaderOnCancel(current->state.id);
}
void Mixer::clearStoppedAtStart(const AudioMsgId &audio) {
@ -1508,6 +1525,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
track->state.position = fullPosition;
emitSignals |= EmitPositionUpdated;
} else if (track->state.waitingForData && !waitingForDataOld) {
LOG(("WAITING FOR DATA FOR: %1.").arg(track->state.id.externalPlayId()));
if (fullPosition > track->state.position) {
track->state.position = fullPosition;
}
@ -1621,7 +1639,6 @@ void DetachFromDevice(not_null<Audio::Instance*> instance) {
} // namespace internal
} // namespace Player
} // namespace Media
class FFMpegAttributesReader : public AbstractFFMpegLoader {
public:
@ -1675,7 +1692,7 @@ public:
void trySet(QString &to, AVDictionary *dict, const char *key) {
if (!to.isEmpty()) return;
if (AVDictionaryEntry* tag = av_dict_get(dict, key, 0, 0)) {
if (AVDictionaryEntry* tag = av_dict_get(dict, key, nullptr, 0)) {
to = QString::fromUtf8(tag->value);
}
}
@ -1731,7 +1748,6 @@ private:
};
namespace Media {
namespace Player {
FileMediaInformation::Song PrepareForSending(const QString &fname, const QByteArray &data) {
@ -1748,7 +1764,6 @@ FileMediaInformation::Song PrepareForSending(const QString &fname, const QByteAr
}
} // namespace Player
} // namespace Media
class FFMpegWaveformCounter : public FFMpegLoader {
public:
@ -1834,8 +1849,12 @@ private:
};
VoiceWaveform audioCountWaveform(const FileLocation &file, const QByteArray &data) {
FFMpegWaveformCounter counter(file, data);
} // namespace Media
VoiceWaveform audioCountWaveform(
const FileLocation &file,
const QByteArray &data) {
Media::FFMpegWaveformCounter counter(file, data);
const auto positionMs = crl::time(0);
if (counter.open(positionMs)) {
return counter.waveform();

View File

@ -10,14 +10,18 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "storage/localimageloader.h"
#include "base/bytes.h"
struct VideoSoundData;
struct VideoSoundPart;
namespace Media {
struct ExternalSoundData;
struct ExternalSoundPart;
} // namespace Media
namespace Media {
namespace Streaming {
struct TimePoint;
} // namespace Streaming
} // namespace Media
namespace Media {
namespace Audio {
class Instance;
@ -126,7 +130,7 @@ public:
void play(const AudioMsgId &audio, crl::time positionMs = 0);
void play(
const AudioMsgId &audio,
std::unique_ptr<VideoSoundData> videoData,
std::unique_ptr<ExternalSoundData> externalData,
crl::time positionMs = 0);
void pause(const AudioMsgId &audio, bool fast = false);
void resume(const AudioMsgId &audio, bool fast = false);
@ -134,13 +138,13 @@ public:
void stop(const AudioMsgId &audio);
void stop(const AudioMsgId &audio, State state);
// Video player audio stream interface.
void feedFromVideo(const VideoSoundPart &part);
void forceToBufferVideo(const AudioMsgId &audioId);
void setSpeedFromVideo(const AudioMsgId &audioId, float64 speed);
Streaming::TimePoint getVideoSyncTimePoint(
// External player audio stream interface.
void feedFromExternal(ExternalSoundPart &&part);
void forceToBufferExternal(const AudioMsgId &audioId);
void setSpeedFromExternal(const AudioMsgId &audioId, float64 speed);
Streaming::TimePoint getExternalSyncTimePoint(
const AudioMsgId &audio) const;
crl::time getVideoCorrectedTime(
crl::time getExternalCorrectedTime(
const AudioMsgId &id,
crl::time frameMs,
crl::time systemMs);
@ -194,7 +198,7 @@ private:
void resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered = -1);
bool checkCurrentALError(AudioMsgId::Type type);
void videoSoundProgress(const AudioMsgId &audio);
void externalSoundProgress(const AudioMsgId &audio);
class Track {
public:
@ -212,7 +216,7 @@ private:
int getNotQueuedBufferIndex();
void setVideoData(std::unique_ptr<VideoSoundData> data);
void setExternalData(std::unique_ptr<ExternalSoundData> data);
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void changeSpeedEffect(float64 speed);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
@ -239,7 +243,7 @@ private:
uint32 buffers[kBuffersCount] = { 0 };
};
Stream stream;
std::unique_ptr<VideoSoundData> videoData;
std::unique_ptr<ExternalSoundData> externalData;
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
struct SpeedEffect {

View File

@ -9,9 +9,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/bytes.h"
namespace Media {
uint64_t AbstractFFMpegLoader::ComputeChannelLayout(
uint64_t channel_layout,
int channels) {
uint64_t channel_layout,
int channels) {
if (channel_layout) {
if (av_get_channel_layout_nb_channels(channel_layout) == channels) {
return channel_layout;
@ -32,13 +34,13 @@ bool AbstractFFMpegLoader::open(crl::time positionMs) {
int res = 0;
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
ioBuffer = (uchar*)av_malloc(AVBlockSize);
ioBuffer = (uchar *)av_malloc(AVBlockSize);
if (!_data.isEmpty()) {
ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void*>(this), &AbstractFFMpegLoader::_read_data, 0, &AbstractFFMpegLoader::_seek_data);
ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void *>(this), &AbstractFFMpegLoader::_read_data, 0, &AbstractFFMpegLoader::_seek_data);
} else if (!_bytes.empty()) {
ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void*>(this), &AbstractFFMpegLoader::_read_bytes, 0, &AbstractFFMpegLoader::_seek_bytes);
ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void *>(this), &AbstractFFMpegLoader::_read_bytes, 0, &AbstractFFMpegLoader::_seek_bytes);
} else {
ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void*>(this), &AbstractFFMpegLoader::_read_file, 0, &AbstractFFMpegLoader::_seek_file);
ioContext = avio_alloc_context(ioBuffer, AVBlockSize, 0, reinterpret_cast<void *>(this), &AbstractFFMpegLoader::_read_file, 0, &AbstractFFMpegLoader::_seek_file);
}
fmtContext = avformat_alloc_context();
if (!fmtContext) {
@ -97,7 +99,7 @@ AbstractFFMpegLoader::~AbstractFFMpegLoader() {
}
int AbstractFFMpegLoader::_read_data(void *opaque, uint8_t *buf, int buf_size) {
auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
auto nbytes = qMin(l->_data.size() - l->_dataPos, int32(buf_size));
if (nbytes <= 0) {
@ -110,7 +112,7 @@ int AbstractFFMpegLoader::_read_data(void *opaque, uint8_t *buf, int buf_size) {
}
int64_t AbstractFFMpegLoader::_seek_data(void *opaque, int64_t offset, int whence) {
auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
int32 newPos = -1;
switch (whence) {
@ -130,7 +132,7 @@ int64_t AbstractFFMpegLoader::_seek_data(void *opaque, int64_t offset, int whenc
}
int AbstractFFMpegLoader::_read_bytes(void *opaque, uint8_t *buf, int buf_size) {
auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
auto nbytes = qMin(static_cast<int>(l->_bytes.size()) - l->_dataPos, buf_size);
if (nbytes <= 0) {
@ -143,14 +145,15 @@ int AbstractFFMpegLoader::_read_bytes(void *opaque, uint8_t *buf, int buf_size)
}
int64_t AbstractFFMpegLoader::_seek_bytes(void *opaque, int64_t offset, int whence) {
auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
int32 newPos = -1;
switch (whence) {
case SEEK_SET: newPos = offset; break;
case SEEK_CUR: newPos = l->_dataPos + offset; break;
case SEEK_END: newPos = static_cast<int>(l->_bytes.size()) + offset; break;
case AVSEEK_SIZE: {
case AVSEEK_SIZE:
{
// Special whence for determining filesize without any seek.
return l->_bytes.size();
} break;
@ -163,18 +166,19 @@ int64_t AbstractFFMpegLoader::_seek_bytes(void *opaque, int64_t offset, int when
}
int AbstractFFMpegLoader::_read_file(void *opaque, uint8_t *buf, int buf_size) {
auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
return int(l->_f.read((char*)(buf), buf_size));
auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
return int(l->_f.read((char *)(buf), buf_size));
}
int64_t AbstractFFMpegLoader::_seek_file(void *opaque, int64_t offset, int whence) {
auto l = reinterpret_cast<AbstractFFMpegLoader*>(opaque);
auto l = reinterpret_cast<AbstractFFMpegLoader *>(opaque);
switch (whence) {
case SEEK_SET: return l->_f.seek(offset) ? l->_f.pos() : -1;
case SEEK_CUR: return l->_f.seek(l->_f.pos() + offset) ? l->_f.pos() : -1;
case SEEK_END: return l->_f.seek(l->_f.size() + offset) ? l->_f.pos() : -1;
case AVSEEK_SIZE: {
case AVSEEK_SIZE:
{
// Special whence for determining filesize without any seek.
return l->_f.size();
} break;
@ -186,14 +190,14 @@ AbstractAudioFFMpegLoader::AbstractAudioFFMpegLoader(
const FileLocation &file,
const QByteArray &data,
bytes::vector &&buffer)
: AbstractFFMpegLoader(file, data, std::move(buffer)) {
_frame = av_frame_alloc();
: AbstractFFMpegLoader(file, data, std::move(buffer))
, _frame(Streaming::MakeFramePointer()) {
}
bool AbstractAudioFFMpegLoader::initUsingContext(
not_null<AVCodecContext*> context,
int64 initialCount,
int initialFrequency) {
not_null<AVCodecContext *> context,
int64 initialCount,
int initialFrequency) {
const auto layout = ComputeChannelLayout(
context->channel_layout,
context->channels);
@ -260,22 +264,20 @@ bool AbstractAudioFFMpegLoader::initUsingContext(
}
auto AbstractAudioFFMpegLoader::replaceFrameAndRead(
not_null<AVFrame*> frame,
Streaming::FramePointer frame,
QByteArray &result,
int64 &samplesAdded)
-> ReadResult {
av_frame_free(&_frame);
_frame = frame;
_frame = std::move(frame);
return readFromReadyFrame(result, samplesAdded);
}
auto AbstractAudioFFMpegLoader::readFromReadyContext(
not_null<AVCodecContext*> context,
not_null<AVCodecContext *> context,
QByteArray &result,
int64 &samplesAdded)
-> ReadResult {
av_frame_unref(_frame);
const auto res = avcodec_receive_frame(context, _frame);
const auto res = avcodec_receive_frame(context, _frame.get());
if (res >= 0) {
return readFromReadyFrame(result, samplesAdded);
}
@ -427,19 +429,19 @@ bool AbstractAudioFFMpegLoader::ensureResampleSpaceAvailable(int samples) {
}
void AbstractAudioFFMpegLoader::appendSamples(
QByteArray &result,
int64 &samplesAdded,
uint8_t **data,
int count) const {
QByteArray & result,
int64 & samplesAdded,
uint8_t * *data,
int count) const {
result.append(
reinterpret_cast<const char*>(data[0]),
reinterpret_cast<const char *>(data[0]),
count * _outputSampleSize);
samplesAdded += count;
}
AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
QByteArray &result,
int64 &samplesAdded) {
QByteArray & result,
int64 & samplesAdded) {
if (frameHasDesiredFormat()) {
appendSamples(
result,
@ -463,7 +465,7 @@ AudioPlayerLoader::ReadResult AbstractAudioFFMpegLoader::readFromReadyFrame(
_swrContext,
_swrDstData,
maxSamples,
(const uint8_t**)_frame->extended_data,
(const uint8_t * *)_frame->extended_data,
_frame->nb_samples);
if (samples < 0) {
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
@ -496,13 +498,12 @@ AbstractAudioFFMpegLoader::~AbstractAudioFFMpegLoader() {
}
av_freep(&_swrDstData);
}
av_frame_free(&_frame);
}
FFMpegLoader::FFMpegLoader(
const FileLocation &file,
const QByteArray &data,
bytes::vector &&buffer)
const FileLocation & file,
const QByteArray & data,
bytes::vector && buffer)
: AbstractAudioFFMpegLoader(file, data, std::move(buffer)) {
}
@ -582,8 +583,8 @@ bool FFMpegLoader::seekTo(crl::time positionMs) {
}
AudioPlayerLoader::ReadResult FFMpegLoader::readMore(
QByteArray &result,
int64 &samplesAdded) {
QByteArray & result,
int64 & samplesAdded) {
const auto readResult = readFromReadyContext(
_codecContext,
result,
@ -641,3 +642,5 @@ FFMpegLoader::~FFMpegLoader() {
avcodec_free_context(&_codecContext);
}
}
} // namespace Media

View File

@ -9,6 +9,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio.h"
#include "media/audio/media_audio_loader.h"
#include "media/streaming/media_streaming_utility.h"
extern "C" {
#include <libavcodec/avcodec.h>
@ -19,6 +20,8 @@ extern "C" {
#include <AL/al.h>
namespace Media {
class AbstractFFMpegLoader : public AudioPlayerLoader {
public:
AbstractFFMpegLoader(
@ -91,18 +94,18 @@ public:
protected:
bool initUsingContext(
not_null<AVCodecContext*> context,
not_null<AVCodecContext *> context,
int64 initialCount,
int initialFrequency);
ReadResult readFromReadyContext(
not_null<AVCodecContext*> context,
not_null<AVCodecContext *> context,
QByteArray &result,
int64 &samplesAdded);
// Streaming player provides the first frame to the ChildFFMpegLoader
// so we replace our allocated frame with the one provided.
ReadResult replaceFrameAndRead(
not_null<AVFrame*> frame,
Streaming::FramePointer frame,
QByteArray &result,
int64 &samplesAdded);
@ -123,7 +126,7 @@ private:
uint8_t **data,
int count) const;
AVFrame *_frame = nullptr;
Streaming::FramePointer _frame;
int _outputFormat = AL_FORMAT_STEREO16;
int _outputChannels = 2;
int _outputSampleSize = 2 * sizeof(uint16);
@ -164,3 +167,5 @@ private:
AVPacket _packet;
};
} // namespace Media

View File

@ -7,7 +7,12 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/audio/media_audio_loader.h"
AudioPlayerLoader::AudioPlayerLoader(const FileLocation &file, const QByteArray &data, bytes::vector &&buffer)
namespace Media {
AudioPlayerLoader::AudioPlayerLoader(
const FileLocation &file,
const QByteArray &data,
bytes::vector &&buffer)
: _file(file)
, _data(data)
, _bytes(std::move(buffer)) {
@ -20,23 +25,31 @@ AudioPlayerLoader::~AudioPlayerLoader() {
}
}
bool AudioPlayerLoader::check(const FileLocation &file, const QByteArray &data) {
return this->_file == file && this->_data.size() == data.size();
bool AudioPlayerLoader::check(
const FileLocation &file,
const QByteArray &data) {
return (this->_file == file) && (this->_data.size() == data.size());
}
void AudioPlayerLoader::saveDecodedSamples(QByteArray *samples, int64 *samplesCount) {
Assert(_savedSamplesCount == 0);
Assert(_savedSamples.isEmpty());
Assert(!_holdsSavedSamples);
void AudioPlayerLoader::saveDecodedSamples(
not_null<QByteArray*> samples,
not_null<int64*> samplesCount) {
Expects(_savedSamplesCount == 0);
Expects(_savedSamples.isEmpty());
Expects(!_holdsSavedSamples);
samples->swap(_savedSamples);
std::swap(*samplesCount, _savedSamplesCount);
_holdsSavedSamples = true;
}
void AudioPlayerLoader::takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount) {
Assert(*samplesCount == 0);
Assert(samples->isEmpty());
Assert(_holdsSavedSamples);
void AudioPlayerLoader::takeSavedDecodedSamples(
not_null<QByteArray*> samples,
not_null<int64*> samplesCount) {
Expects(*samplesCount == 0);
Expects(samples->isEmpty());
Expects(_holdsSavedSamples);
samples->swap(_savedSamples);
std::swap(*samplesCount, _savedSamplesCount);
_holdsSavedSamples = false;
@ -51,17 +64,29 @@ bool AudioPlayerLoader::openFile() {
if (_f.isOpen()) _f.close();
if (!_access) {
if (!_file.accessEnable()) {
LOG(("Audio Error: could not open file access '%1', data size '%2', error %3, %4").arg(_file.name()).arg(_data.size()).arg(_f.error()).arg(_f.errorString()));
LOG(("Audio Error: could not open file access '%1', "
"data size '%2', error %3, %4"
).arg(_file.name()
).arg(_data.size()
).arg(_f.error()
).arg(_f.errorString()));
return false;
}
_access = true;
}
_f.setFileName(_file.name());
if (!_f.open(QIODevice::ReadOnly)) {
LOG(("Audio Error: could not open file '%1', data size '%2', error %3, %4").arg(_file.name()).arg(_data.size()).arg(_f.error()).arg(_f.errorString()));
LOG(("Audio Error: could not open file '%1', "
"data size '%2', error %3, %4"
).arg(_file.name()
).arg(_data.size()
).arg(_f.error()
).arg(_f.errorString()));
return false;
}
}
_dataPos = 0;
return true;
}
} // namespace Media

View File

@ -8,10 +8,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#pragma once
#include "base/bytes.h"
#include "media/streaming/media_streaming_utility.h"
namespace FFMpeg {
struct AVPacketDataWrap;
} // namespace FFMpeg
namespace Media {
class AudioPlayerLoader {
public:
@ -35,9 +34,10 @@ public:
Wait,
EndOfFile,
};
virtual ReadResult readMore(QByteArray &samples, int64 &samplesCount) = 0;
virtual void enqueuePackets(
QQueue<FFMpeg::AVPacketDataWrap> &&packets) {
virtual ReadResult readMore(
QByteArray &samples,
int64 &samplesCount) = 0;
virtual void enqueuePackets(std::deque<Streaming::Packet> &&packets) {
Unexpected("enqueuePackets() call on not ChildFFMpegLoader.");
}
virtual void setForceToBuffer(bool force) {
@ -47,8 +47,12 @@ public:
return false;
}
void saveDecodedSamples(QByteArray *samples, int64 *samplesCount);
void takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount);
void saveDecodedSamples(
not_null<QByteArray*> samples,
not_null<int64*> samplesCount);
void takeSavedDecodedSamples(
not_null<QByteArray*> samples,
not_null<int64*> samplesCount);
bool holdsSavedDecodedSamples() const;
protected:
@ -68,3 +72,5 @@ private:
bool _holdsSavedSamples = false;
};
} // namespace Media

View File

@ -20,46 +20,46 @@ constexpr auto kPlaybackBufferSize = 256 * 1024;
} // namespace
Loaders::Loaders(QThread *thread)
: _fromVideoNotify([=] { videoSoundAdded(); }) {
: _fromExternalNotify([=] { videoSoundAdded(); }) {
moveToThread(thread);
_fromVideoNotify.moveToThread(thread);
_fromExternalNotify.moveToThread(thread);
connect(thread, SIGNAL(started()), this, SLOT(onInit()));
connect(thread, SIGNAL(finished()), this, SLOT(deleteLater()));
}
void Loaders::feedFromVideo(const VideoSoundPart &part) {
void Loaders::feedFromExternal(ExternalSoundPart &&part) {
auto invoke = false;
{
QMutexLocker lock(&_fromVideoMutex);
invoke = _fromVideoQueues.empty()
&& _fromVideoForceToBuffer.empty();
_fromVideoQueues[part.audio].enqueue(FFMpeg::dataWrapFromPacket(*part.packet));
QMutexLocker lock(&_fromExternalMutex);
invoke = _fromExternalQueues.empty()
&& _fromExternalForceToBuffer.empty();
_fromExternalQueues[part.audio].push_back(std::move(part.packet));
}
if (invoke) {
_fromVideoNotify.call();
_fromExternalNotify.call();
}
}
void Loaders::forceToBufferVideo(const AudioMsgId &audioId) {
void Loaders::forceToBufferExternal(const AudioMsgId &audioId) {
auto invoke = false;
{
QMutexLocker lock(&_fromVideoMutex);
invoke = _fromVideoQueues.empty()
&& _fromVideoForceToBuffer.empty();
_fromVideoForceToBuffer.emplace(audioId);
QMutexLocker lock(&_fromExternalMutex);
invoke = _fromExternalQueues.empty()
&& _fromExternalForceToBuffer.empty();
_fromExternalForceToBuffer.emplace(audioId);
}
if (invoke) {
_fromVideoNotify.call();
_fromExternalNotify.call();
}
}
void Loaders::videoSoundAdded() {
auto queues = decltype(_fromVideoQueues)();
auto forces = decltype(_fromVideoForceToBuffer)();
auto queues = decltype(_fromExternalQueues)();
auto forces = decltype(_fromExternalForceToBuffer)();
{
QMutexLocker lock(&_fromVideoMutex);
queues = base::take(_fromVideoQueues);
forces = base::take(_fromVideoForceToBuffer);
QMutexLocker lock(&_fromExternalMutex);
queues = base::take(_fromExternalQueues);
forces = base::take(_fromExternalForceToBuffer);
}
for (const auto &audioId : forces) {
const auto tryLoader = [&](const auto &id, auto &loader) {
@ -90,34 +90,9 @@ void Loaders::videoSoundAdded() {
}
return false;
};
const auto used = tryLoader(_audio, _audioLoader)
tryLoader(_audio, _audioLoader)
|| tryLoader(_song, _songLoader)
|| tryLoader(_video, _videoLoader);
if (!used) {
for (auto &packetData : packets) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}
}
}
Loaders::~Loaders() {
QMutexLocker lock(&_fromVideoMutex);
clearFromVideoQueue();
}
void Loaders::clearFromVideoQueue() {
auto queues = base::take(_fromVideoQueues);
for (auto &pair : queues) {
const auto audioId = pair.first;
auto &packets = pair.second;
for (auto &packetData : packets) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}
}
@ -144,7 +119,13 @@ AudioMsgId Loaders::clear(AudioMsgId::Type type) {
AudioMsgId result;
switch (type) {
case AudioMsgId::Type::Voice: std::swap(result, _audio); _audioLoader = nullptr; break;
case AudioMsgId::Type::Song: std::swap(result, _song); _songLoader = nullptr; break;
case AudioMsgId::Type::Song:
if (_songLoader) {
LOG(("SONG LOADER KILLED FOR: %1.").arg(_song.externalPlayId()));
}
std::swap(result, _song);
_songLoader = nullptr;
break;
case AudioMsgId::Type::Video: std::swap(result, _video); _videoLoader = nullptr; break;
}
return result;
@ -324,6 +305,7 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
return;
}
}
LOG(("alSourcePlay: loader for: %1").arg(track->state.id.externalPlayId()));
alSourcePlay(track->stream.source);
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
@ -371,17 +353,21 @@ AudioPlayerLoader *Loaders::setupLoader(
case AudioMsgId::Type::Video: _video = audio; loader = &_videoLoader; break;
}
if (audio.playId()) {
if (!track->videoData) {
if (audio.externalPlayId()) {
if (!track->externalData) {
clear(audio.type());
track->state.state = State::StoppedAtError;
emit error(audio);
LOG(("Audio Error: video sound data not ready"));
return nullptr;
}
*loader = std::make_unique<ChildFFMpegLoader>(std::move(track->videoData));
*loader = std::make_unique<ChildFFMpegLoader>(
std::move(track->externalData));
} else {
*loader = std::make_unique<FFMpegLoader>(track->file, track->data, bytes::vector());
*loader = std::make_unique<FFMpegLoader>(
track->file,
track->data,
bytes::vector());
}
l = loader->get();
@ -444,5 +430,7 @@ void Loaders::onCancel(const AudioMsgId &audio) {
}
}
Loaders::~Loaders() = default;
} // namespace Player
} // namespace Media

View File

@ -21,15 +21,15 @@ class Loaders : public QObject {
public:
Loaders(QThread *thread);
void feedFromVideo(const VideoSoundPart &part);
void forceToBufferVideo(const AudioMsgId &audioId);
void feedFromExternal(ExternalSoundPart &&part);
void forceToBufferExternal(const AudioMsgId &audioId);
~Loaders();
signals:
void error(const AudioMsgId &audio);
void needToCheck();
public slots:
public slots:
void onInit();
void onStart(const AudioMsgId &audio, qint64 positionMs);
@ -38,17 +38,18 @@ signals:
private:
void videoSoundAdded();
void clearFromVideoQueue();
AudioMsgId _audio, _song, _video;
std::unique_ptr<AudioPlayerLoader> _audioLoader;
std::unique_ptr<AudioPlayerLoader> _songLoader;
std::unique_ptr<AudioPlayerLoader> _videoLoader;
QMutex _fromVideoMutex;
base::flat_map<AudioMsgId, QQueue<FFMpeg::AVPacketDataWrap>> _fromVideoQueues;
base::flat_set<AudioMsgId> _fromVideoForceToBuffer;
SingleQueuedInvokation _fromVideoNotify;
QMutex _fromExternalMutex;
base::flat_map<
AudioMsgId,
std::deque<Streaming::Packet>> _fromExternalQueues;
base::flat_set<AudioMsgId> _fromExternalForceToBuffer;
SingleQueuedInvokation _fromExternalNotify;
void emitError(AudioMsgId::Type type);
AudioMsgId clear(AudioMsgId::Type type);

View File

@ -9,6 +9,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/crash_reports.h"
namespace Media {
namespace {
constexpr AVSampleFormat AudioToFormat = AV_SAMPLE_FMT_S16;
@ -26,27 +27,19 @@ bool IsPlanarFormat(int format) {
} // namespace
VideoSoundData::~VideoSoundData() {
if (frame) {
av_frame_free(&frame);
}
if (context) {
avcodec_close(context);
avcodec_free_context(&context);
}
}
ChildFFMpegLoader::ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data)
ChildFFMpegLoader::ChildFFMpegLoader(
std::unique_ptr<ExternalSoundData> &&data)
: AbstractAudioFFMpegLoader(
FileLocation(),
QByteArray(),
bytes::vector())
, _parentData(std::move(data)) {
Expects(_parentData->codec != nullptr);
}
bool ChildFFMpegLoader::open(crl::time positionMs) {
return initUsingContext(
_parentData->context,
_parentData->codec.get(),
_parentData->length,
_parentData->frequency);
}
@ -64,8 +57,8 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readFromInitialFrame(
}
AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
QByteArray &result,
int64 &samplesAdded) {
QByteArray & result,
int64 & samplesAdded) {
const auto initialFrameResult = readFromInitialFrame(
result,
samplesAdded);
@ -74,32 +67,37 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
}
const auto readResult = readFromReadyContext(
_parentData->context,
_parentData->codec.get(),
result,
samplesAdded);
if (readResult != ReadResult::Wait) {
return readResult;
}
if (_queue.isEmpty()) {
if (_queue.empty()) {
return _eofReached ? ReadResult::EndOfFile : ReadResult::Wait;
}
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, _queue.dequeue());
auto packet = std::move(_queue.front());
_queue.pop_front();
_eofReached = FFMpeg::isNullPacket(packet);
_eofReached = packet.empty();
if (_eofReached) {
avcodec_send_packet(_parentData->context, nullptr); // drain
avcodec_send_packet(_parentData->codec.get(), nullptr); // drain
return ReadResult::Ok;
}
auto res = avcodec_send_packet(_parentData->context, &packet);
auto res = avcodec_send_packet(
_parentData->codec.get(),
&packet.fields());
if (res < 0) {
FFMpeg::freePacket(&packet);
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
LOG(("Audio Error: Unable to avcodec_send_packet() file '%1', data size '%2', error %3, %4").arg(_file.name()).arg(_data.size()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
LOG(("Audio Error: Unable to avcodec_send_packet() file '%1', "
"data size '%2', error %3, %4"
).arg(_file.name()
).arg(_data.size()
).arg(res
).arg(av_make_error_string(err, sizeof(err), res)));
// There is a sample voice message where skipping such packet
// results in a crash (read_access to nullptr) in swr_convert().
if (res == AVERROR_INVALIDDATA) {
@ -107,16 +105,18 @@ AudioPlayerLoader::ReadResult ChildFFMpegLoader::readMore(
}
return ReadResult::Error;
}
FFMpeg::freePacket(&packet);
return ReadResult::Ok;
}
void ChildFFMpegLoader::enqueuePackets(
QQueue<FFMpeg::AVPacketDataWrap> &&packets) {
std::deque<Streaming::Packet> &&packets) {
if (_queue.empty()) {
_queue = std::move(packets);
} else {
_queue += std::move(packets);
_queue.insert(
end(_queue),
std::make_move_iterator(packets.begin()),
std::make_move_iterator(packets.end()));
}
packets.clear();
}
@ -129,10 +129,6 @@ bool ChildFFMpegLoader::forceToBuffer() const {
return _forceToBuffer;
}
ChildFFMpegLoader::~ChildFFMpegLoader() {
for (auto &packetData : base::take(_queue)) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}
ChildFFMpegLoader::~ChildFFMpegLoader() = default;
} // namespace Media

View File

@ -8,60 +8,26 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#pragma once
#include "media/audio/media_audio_ffmpeg_loader.h"
#include "media/streaming/media_streaming_utility.h"
struct VideoSoundData {
AVCodecContext *context = nullptr;
AVFrame *frame = nullptr;
namespace Media {
struct ExternalSoundData {
Streaming::CodecPointer codec;
Streaming::FramePointer frame;
int32 frequency = Media::Player::kDefaultFrequency;
int64 length = 0;
float64 speed = 1.; // 0.5 <= speed <= 2.
~VideoSoundData();
};
struct VideoSoundPart {
const AVPacket *packet = nullptr;
struct ExternalSoundPart {
AudioMsgId audio;
Streaming::Packet packet;
};
namespace FFMpeg {
// AVPacket has a deprecated field, so when you copy an AVPacket
// variable (e.g. inside QQueue), a compile warning is emitted.
// We wrap full AVPacket data in a new AVPacketDataWrap struct.
// All other fields are copied from AVPacket without modifications.
struct AVPacketDataWrap {
char __data[sizeof(AVPacket)];
};
inline void packetFromDataWrap(AVPacket &packet, const AVPacketDataWrap &data) {
memcpy(&packet, &data, sizeof(data));
}
inline AVPacketDataWrap dataWrapFromPacket(const AVPacket &packet) {
AVPacketDataWrap data;
memcpy(&data, &packet, sizeof(data));
return data;
}
inline bool isNullPacket(const AVPacket &packet) {
return packet.data == nullptr && packet.size == 0;
}
inline bool isNullPacket(const AVPacket *packet) {
return isNullPacket(*packet);
}
inline void freePacket(AVPacket *packet) {
if (!isNullPacket(packet)) {
av_packet_unref(packet);
}
}
} // namespace FFMpeg
class ChildFFMpegLoader : public AbstractAudioFFMpegLoader {
public:
ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data);
ChildFFMpegLoader(std::unique_ptr<ExternalSoundData> &&data);
bool open(crl::time positionMs) override;
@ -70,8 +36,7 @@ public:
}
ReadResult readMore(QByteArray &result, int64 &samplesAdded) override;
void enqueuePackets(
QQueue<FFMpeg::AVPacketDataWrap> &&packets) override;
void enqueuePackets(std::deque<Streaming::Packet> &&packets) override;
void setForceToBuffer(bool force) override;
bool forceToBuffer() const override;
@ -89,9 +54,11 @@ private:
QByteArray &result,
int64 &samplesAdded);
std::unique_ptr<VideoSoundData> _parentData;
QQueue<FFMpeg::AVPacketDataWrap> _queue;
std::unique_ptr<ExternalSoundData> _parentData;
std::deque<Streaming::Packet> _queue;
bool _forceToBuffer = false;
bool _eofReached = false;
};
} // namespace Media

View File

@ -44,26 +44,25 @@ bool isAlignedImage(const QImage &image) {
} // namespace
FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data, const AudioMsgId &audio) : ReaderImplementation(location, data)
FFMpegReaderImplementation::FFMpegReaderImplementation(
FileLocation *location,
QByteArray *data,
const AudioMsgId &audio)
: ReaderImplementation(location, data)
, _frame(Streaming::MakeFramePointer())
, _audioMsgId(audio) {
_frame = av_frame_alloc();
}
ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
if (_frameRead) {
av_frame_unref(_frame);
_frameRead = false;
}
do {
int res = avcodec_receive_frame(_codecContext, _frame);
int res = avcodec_receive_frame(_codecContext, _frame.get());
if (res >= 0) {
processReadFrame();
return ReadResult::Success;
}
if (res == AVERROR_EOF) {
clearPacketQueue();
_packetQueue.clear();
if (_mode == Mode::Normal) {
return ReadResult::EndOfFile;
}
@ -96,7 +95,7 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
return ReadResult::Error;
}
while (_packetQueue.isEmpty()) {
while (_packetQueue.empty()) {
auto packetResult = readAndProcessPacket();
if (packetResult == PacketResult::Error) {
return ReadResult::Error;
@ -104,19 +103,27 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
break;
}
}
if (_packetQueue.isEmpty()) {
if (_packetQueue.empty()) {
avcodec_send_packet(_codecContext, nullptr); // drain
continue;
}
startPacket();
auto packet = std::move(_packetQueue.front());
_packetQueue.pop_front();
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, _packetQueue.head());
res = avcodec_send_packet(_codecContext, &packet);
const auto native = &packet.fields();
const auto guard = gsl::finally([
&,
size = native->size,
data = native->data
] {
native->size = size;
native->data = data;
packet = Streaming::Packet();
});
res = avcodec_send_packet(_codecContext, native);
if (res < 0) {
finishPacket();
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
LOG(("Gif Error: Unable to avcodec_send_packet() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
if (res == AVERROR_INVALIDDATA) {
@ -126,7 +133,6 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
}
return ReadResult::Error;
}
finishPacket();
} while (true);
return ReadResult::Error;
@ -171,7 +177,12 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(crl:
}
// sync by audio stream
auto correctMs = (frameMs >= 0) ? Player::mixer()->getVideoCorrectedTime(_audioMsgId, frameMs, systemMs) : frameMs;
auto correctMs = (frameMs >= 0)
? Player::mixer()->getExternalCorrectedTime(
_audioMsgId,
frameMs,
systemMs)
: frameMs;
if (!_frameRead) {
auto readResult = readNextFrame();
if (readResult != ReadResult::Success) {
@ -232,7 +243,7 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
} else {
if ((_swsSize != toSize) || (_frame->format != -1 && _frame->format != _codecContext->pix_fmt) || !_swsContext) {
_swsSize = toSize;
_swsContext = sws_getCachedContext(_swsContext, _frame->width, _frame->height, AVPixelFormat(_frame->format), toSize.width(), toSize.height(), AV_PIX_FMT_BGRA, 0, 0, 0, 0);
_swsContext = sws_getCachedContext(_swsContext, _frame->width, _frame->height, AVPixelFormat(_frame->format), toSize.width(), toSize.height(), AV_PIX_FMT_BGRA, 0, nullptr, nullptr, nullptr);
}
// AV_NUM_DATA_POINTERS defined in AVFrame struct
uint8_t *toData[AV_NUM_DATA_POINTERS] = { to.bits(), nullptr };
@ -264,7 +275,8 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
}
}
av_frame_unref(_frame);
Streaming::ClearFrameMemory(_frame.get());
return true;
}
@ -286,7 +298,7 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
return false;
}
_ioBuffer = (uchar*)av_malloc(AVBlockSize);
_ioContext = avio_alloc_context(_ioBuffer, AVBlockSize, 0, static_cast<void*>(this), &FFMpegReaderImplementation::_read, 0, &FFMpegReaderImplementation::_seek);
_ioContext = avio_alloc_context(_ioBuffer, AVBlockSize, 0, static_cast<void*>(this), &FFMpegReaderImplementation::_read, nullptr, &FFMpegReaderImplementation::_seek);
_fmtContext = avformat_alloc_context();
if (!_fmtContext) {
LOG(("Gif Error: Unable to avformat_alloc_context %1").arg(logData()));
@ -296,26 +308,26 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
int res = 0;
char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
if ((res = avformat_open_input(&_fmtContext, 0, 0, 0)) < 0) {
_ioBuffer = 0;
if ((res = avformat_open_input(&_fmtContext, nullptr, nullptr, nullptr)) < 0) {
_ioBuffer = nullptr;
LOG(("Gif Error: Unable to avformat_open_input %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return false;
}
_opened = true;
if ((res = avformat_find_stream_info(_fmtContext, 0)) < 0) {
if ((res = avformat_find_stream_info(_fmtContext, nullptr)) < 0) {
LOG(("Gif Error: Unable to avformat_find_stream_info %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return false;
}
_streamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0);
_streamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
if (_streamId < 0) {
LOG(("Gif Error: Unable to av_find_best_stream %1, error %2, %3").arg(logData()).arg(_streamId).arg(av_make_error_string(err, sizeof(err), _streamId)));
return false;
}
auto rotateTag = av_dict_get(_fmtContext->streams[_streamId]->metadata, "rotate", NULL, 0);
auto rotateTag = av_dict_get(_fmtContext->streams[_streamId]->metadata, "rotate", nullptr, 0);
if (rotateTag && *rotateTag->value) {
auto stringRotateTag = QString::fromUtf8(rotateTag->value);
auto toIntSucceeded = false;
@ -339,20 +351,20 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
const auto codec = avcodec_find_decoder(_codecContext->codec_id);
_audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0);
_audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);
if (_mode == Mode::Inspecting) {
_hasAudioStream = (_audioStreamId >= 0);
_audioStreamId = -1;
} else if (_mode == Mode::Silent || !_audioMsgId.playId()) {
} else if (_mode == Mode::Silent || !_audioMsgId.externalPlayId()) {
_audioStreamId = -1;
}
if ((res = avcodec_open2(_codecContext, codec, 0)) < 0) {
if ((res = avcodec_open2(_codecContext, codec, nullptr)) < 0) {
LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return false;
}
std::unique_ptr<VideoSoundData> soundData;
std::unique_ptr<ExternalSoundData> soundData;
if (_audioStreamId >= 0) {
auto audioContext = avcodec_alloc_context3(nullptr);
if (!audioContext) {
@ -372,8 +384,8 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
_audioStreamId = -1;
} else {
soundData = std::make_unique<VideoSoundData>();
soundData->context = audioContext;
soundData = std::make_unique<ExternalSoundData>();
soundData->codec = Streaming::CodecPointer(audioContext);
soundData->frequency = _fmtContext->streams[_audioStreamId]->codecpar->sample_rate;
if (_fmtContext->streams[_audioStreamId]->duration == AV_NOPTS_VALUE) {
soundData->length = (_fmtContext->duration * soundData->frequency) / AV_TIME_BASE;
@ -393,10 +405,10 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
}
}
AVPacket packet;
auto readResult = readPacket(&packet);
Streaming::Packet packet;
auto readResult = readPacket(packet);
if (readResult == PacketResult::Ok && positionMs > 0) {
positionMs = countPacketMs(&packet);
positionMs = countPacketMs(packet);
}
if (hasAudio()) {
@ -404,7 +416,7 @@ bool FFMpegReaderImplementation::start(Mode mode, crl::time &positionMs) {
}
if (readResult == PacketResult::Ok) {
processPacket(&packet);
processPacket(std::move(packet));
}
return true;
@ -424,14 +436,14 @@ bool FFMpegReaderImplementation::inspectAt(crl::time &positionMs) {
_packetQueue.clear();
AVPacket packet;
auto readResult = readPacket(&packet);
Streaming::Packet packet;
auto readResult = readPacket(packet);
if (readResult == PacketResult::Ok && positionMs > 0) {
positionMs = countPacketMs(&packet);
positionMs = countPacketMs(packet);
}
if (readResult == PacketResult::Ok) {
processPacket(&packet);
processPacket(std::move(packet));
}
return true;
@ -455,12 +467,6 @@ QString FFMpegReaderImplementation::logData() const {
}
FFMpegReaderImplementation::~FFMpegReaderImplementation() {
clearPacketQueue();
if (_frameRead) {
av_frame_unref(_frame);
_frameRead = false;
}
if (_codecContext) avcodec_free_context(&_codecContext);
if (_swsContext) sws_freeContext(_swsContext);
if (_opened) {
@ -473,24 +479,18 @@ FFMpegReaderImplementation::~FFMpegReaderImplementation() {
av_freep(&_ioBuffer);
}
if (_fmtContext) avformat_free_context(_fmtContext);
av_frame_free(&_frame);
}
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(AVPacket *packet) {
av_init_packet(packet);
packet->data = nullptr;
packet->size = 0;
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(Streaming::Packet &packet) {
int res = 0;
if ((res = av_read_frame(_fmtContext, packet)) < 0) {
if ((res = av_read_frame(_fmtContext, &packet.fields())) < 0) {
if (res == AVERROR_EOF) {
if (_audioStreamId >= 0) {
// queue terminating packet to audio player
auto drain = AVPacket();
av_init_packet(&drain);
drain.data = nullptr;
drain.size = 0;
Player::mixer()->feedFromVideo({ &drain, _audioMsgId });
Player::mixer()->feedFromExternal({
_audioMsgId,
Streaming::Packet()
});
}
return PacketResult::EndOfFile;
}
@ -501,72 +501,44 @@ FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(
return PacketResult::Ok;
}
void FFMpegReaderImplementation::processPacket(AVPacket *packet) {
auto videoPacket = (packet->stream_index == _streamId);
auto audioPacket = (_audioStreamId >= 0 && packet->stream_index == _audioStreamId);
void FFMpegReaderImplementation::processPacket(Streaming::Packet &&packet) {
const auto &native = packet.fields();
auto videoPacket = (native.stream_index == _streamId);
auto audioPacket = (_audioStreamId >= 0 && native.stream_index == _audioStreamId);
if (audioPacket || videoPacket) {
if (videoPacket) {
_lastReadVideoMs = countPacketMs(packet);
_packetQueue.enqueue(FFMpeg::dataWrapFromPacket(*packet));
_packetQueue.push_back(std::move(packet));
} else if (audioPacket) {
_lastReadAudioMs = countPacketMs(packet);
// queue packet to audio player
Player::mixer()->feedFromVideo({ packet, _audioMsgId });
Player::mixer()->feedFromExternal({
_audioMsgId,
std::move(packet)
});
}
} else {
av_packet_unref(packet);
}
}
crl::time FFMpegReaderImplementation::countPacketMs(AVPacket *packet) const {
int64 packetPts = (packet->pts == AV_NOPTS_VALUE) ? packet->dts : packet->pts;
crl::time packetMs = (packetPts * 1000LL * _fmtContext->streams[packet->stream_index]->time_base.num) / _fmtContext->streams[packet->stream_index]->time_base.den;
crl::time FFMpegReaderImplementation::countPacketMs(
const Streaming::Packet &packet) const {
const auto &native = packet.fields();
int64 packetPts = (native.pts == AV_NOPTS_VALUE) ? native.dts : native.pts;
crl::time packetMs = (packetPts * 1000LL * _fmtContext->streams[native.stream_index]->time_base.num) / _fmtContext->streams[native.stream_index]->time_base.den;
return packetMs;
}
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readAndProcessPacket() {
AVPacket packet;
auto result = readPacket(&packet);
Streaming::Packet packet;
auto result = readPacket(packet);
if (result == PacketResult::Ok) {
processPacket(&packet);
processPacket(std::move(packet));
}
return result;
}
void FFMpegReaderImplementation::startPacket() {
if (!_packetStarted && !_packetQueue.isEmpty()) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, _packetQueue.head());
_packetStartedSize = packet.size;
_packetStartedData = packet.data;
_packetStarted = true;
}
}
void FFMpegReaderImplementation::finishPacket() {
if (_packetStarted) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, _packetQueue.head());
packet.size = _packetStartedSize;
packet.data = _packetStartedData;
_packetStarted = false;
av_packet_unref(&packet);
_packetQueue.dequeue();
}
}
void FFMpegReaderImplementation::clearPacketQueue() {
finishPacket();
auto packets = base::take(_packetQueue);
for (auto &packetData : packets) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
av_packet_unref(&packet);
}
}
int FFMpegReaderImplementation::_read(void *opaque, uint8_t *buf, int buf_size) {
FFMpegReaderImplementation *l = reinterpret_cast<FFMpegReaderImplementation*>(opaque);
return int(l->_device->read((char*)(buf), buf_size));

View File

@ -15,6 +15,7 @@ extern "C" {
#include "media/clip/media_clip_implementation.h"
#include "media/audio/media_child_ffmpeg_loader.h"
#include "media/streaming/media_streaming_utility.h"
namespace Media {
namespace Clip {
@ -54,9 +55,9 @@ private:
EndOfFile,
Error,
};
PacketResult readPacket(AVPacket *packet);
void processPacket(AVPacket *packet);
crl::time countPacketMs(AVPacket *packet) const;
PacketResult readPacket(Streaming::Packet &packet);
void processPacket(Streaming::Packet &&packet);
crl::time countPacketMs(const Streaming::Packet &packet) const;
PacketResult readAndProcessPacket();
enum class Rotation {
@ -70,10 +71,6 @@ private:
return (_rotation == Rotation::Degrees90) || (_rotation == Rotation::Degrees270);
}
void startPacket();
void finishPacket();
void clearPacketQueue();
static int _read(void *opaque, uint8_t *buf, int buf_size);
static int64_t _seek(void *opaque, int64_t offset, int whence);
@ -86,7 +83,7 @@ private:
AVFormatContext *_fmtContext = nullptr;
AVCodecContext *_codecContext = nullptr;
int _streamId = 0;
AVFrame *_frame = nullptr;
Streaming::FramePointer _frame;
bool _opened = false;
bool _hadFrame = false;
bool _frameRead = false;
@ -98,10 +95,7 @@ private:
crl::time _lastReadVideoMs = 0;
crl::time _lastReadAudioMs = 0;
QQueue<FFMpeg::AVPacketDataWrap> _packetQueue;
int _packetStartedSize = 0;
uint8_t *_packetStartedData = nullptr;
bool _packetStarted = false;
std::deque<Streaming::Packet> _packetQueue;
int _width = 0;
int _height = 0;

View File

@ -90,7 +90,10 @@ Reader::Reader(const QString &filepath, Callback &&callback, Mode mode, crl::tim
Reader::Reader(not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode, crl::time seekMs)
: _callback(std::move(callback))
, _mode(mode)
, _audioMsgId(document, msgId, (mode == Mode::Video) ? rand_value<uint32>() : 0)
, _audioMsgId(
document,
msgId,
(mode == Mode::Video) ? AudioMsgId::CreateExternalPlayId() : 0)
, _seekPositionMs(seekMs) {
init(document->location(), document->data());
}

View File

@ -1,393 +0,0 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/player/media_player_cover.h"
#include "data/data_document.h"
#include "ui/widgets/labels.h"
#include "ui/widgets/continuous_sliders.h"
#include "ui/widgets/buttons.h"
#include "media/audio/media_audio.h"
#include "media/view/media_view_playback_progress.h"
#include "media/player/media_player_button.h"
#include "media/player/media_player_instance.h"
#include "media/player/media_player_volume_controller.h"
#include "styles/style_media_player.h"
#include "styles/style_mediaview.h"
#include "layout.h"
namespace Media {
namespace Player {
using ButtonState = PlayButtonLayout::State;
class CoverWidget::PlayButton : public Ui::AbstractButton {
public:
PlayButton(QWidget *parent);
void setState(ButtonState state) {
_layout.setState(state);
}
void finishTransform() {
_layout.finishTransform();
}
protected:
void paintEvent(QPaintEvent *e) override;
private:
PlayButtonLayout _layout;
};
CoverWidget::PlayButton::PlayButton(QWidget *parent) : Ui::AbstractButton(parent)
, _layout(st::mediaPlayerPanelButton, [this] { update(); }) {
resize(st::mediaPlayerPanelButtonSize);
setCursor(style::cur_pointer);
}
void CoverWidget::PlayButton::paintEvent(QPaintEvent *e) {
Painter p(this);
p.translate(st::mediaPlayerPanelButtonPosition.x(), st::mediaPlayerPanelButtonPosition.y());
_layout.paint(p, st::mediaPlayerActiveFg);
}
CoverWidget::CoverWidget(QWidget *parent) : RpWidget(parent)
, _nameLabel(this, st::mediaPlayerName)
, _timeLabel(this, st::mediaPlayerTime)
, _close(this, st::mediaPlayerPanelClose)
, _playbackSlider(this, st::mediaPlayerPanelPlayback)
, _playbackProgress(std::make_unique<View::PlaybackProgress>())
, _playPause(this)
, _volumeToggle(this, st::mediaPlayerVolumeToggle)
, _volumeController(this)
, _pinPlayer(this, st::mediaPlayerPanelPinButton)
, _repeatTrack(this, st::mediaPlayerRepeatButton) {
setAttribute(Qt::WA_OpaquePaintEvent);
resize(width(), st::mediaPlayerCoverHeight);
_close->hide();
_nameLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
_timeLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
setMouseTracking(true);
_playbackProgress->setInLoadingStateChangedCallback([=](bool loading) {
_playbackSlider->setDisabled(loading);
});
_playbackProgress->setValueChangedCallback([=](float64 value) {
_playbackSlider->setValue(value);
});
_playbackSlider->setChangeProgressCallback([=](float64 value) {
_playbackProgress->setValue(value, false);
handleSeekProgress(value);
});
_playbackSlider->setChangeFinishedCallback([=](float64 value) {
_playbackProgress->setValue(value, false);
handleSeekFinished(value);
});
_playPause->setClickedCallback([=] {
instance()->playPauseCancelClicked(AudioMsgId::Type::Song);
});
updateRepeatTrackIcon();
_repeatTrack->setClickedCallback([=] {
instance()->toggleRepeat(AudioMsgId::Type::Song);
});
updateVolumeToggleIcon();
_volumeToggle->setClickedCallback([=]() {
Global::SetSongVolume((Global::SongVolume() > 0) ? 0. : Global::RememberedSongVolume());
mixer()->setSongVolume(Global::SongVolume());
Global::RefSongVolumeChanged().notify();
});
subscribe(Global::RefSongVolumeChanged(), [=] { updateVolumeToggleIcon(); });
subscribe(instance()->repeatChangedNotifier(), [=](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Song) {
updateRepeatTrackIcon();
}
});
subscribe(instance()->updatedNotifier(), [=](const TrackState &state) {
if (state.id.type() == AudioMsgId::Type::Song) {
handleSongUpdate(state);
}
});
subscribe(instance()->trackChangedNotifier(), [=](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Song) {
handleSongChange();
}
});
instance()->playlistChanges(
AudioMsgId::Type::Song
) | rpl::start_with_next([=] {
handlePlaylistUpdate();
}, lifetime());
handleSongChange();
handleSongUpdate(mixer()->currentState(AudioMsgId::Type::Song));
_playPause->finishTransform();
}
void CoverWidget::setPinCallback(ButtonCallback &&callback) {
_pinPlayer->setClickedCallback(std::move(callback));
}
void CoverWidget::setCloseCallback(ButtonCallback &&callback) {
_close->setClickedCallback(std::move(callback));
}
void CoverWidget::handleSeekProgress(float64 progress) {
if (!_lastDurationMs) return;
auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs);
if (_seekPositionMs != positionMs) {
_seekPositionMs = positionMs;
updateTimeLabel();
instance()->startSeeking(AudioMsgId::Type::Song);
}
}
void CoverWidget::handleSeekFinished(float64 progress) {
if (!_lastDurationMs) return;
auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs);
_seekPositionMs = -1;
auto type = AudioMsgId::Type::Song;
auto state = Media::Player::mixer()->currentState(type);
if (state.id && state.length && state.frequency) {
Media::Player::mixer()->seek(type, qRound(progress * state.length * 1000. / state.frequency));
}
instance()->stopSeeking(type);
}
void CoverWidget::resizeEvent(QResizeEvent *e) {
auto widthForName = width() - 2 * (st::mediaPlayerPanelPadding);
widthForName -= _timeLabel->width() + 2 * st::normalFont->spacew;
_nameLabel->resizeToWidth(widthForName);
updateLabelPositions();
_close->moveToRight(0, 0);
int skip = (st::mediaPlayerPanelPlayback.seekSize.width() / 2);
int length = (width() - 2 * st::mediaPlayerPanelPadding + st::mediaPlayerPanelPlayback.seekSize.width());
_playbackSlider->setGeometry(st::mediaPlayerPanelPadding - skip, st::mediaPlayerPanelPlaybackTop, length, 2 * st::mediaPlayerPanelPlaybackPadding + st::mediaPlayerPanelPlayback.width);
auto top = st::mediaPlayerPanelVolumeToggleTop;
auto right = st::mediaPlayerPanelPlayLeft;
_repeatTrack->moveToRight(right, top); right += _repeatTrack->width();
_pinPlayer->moveToRight(right, top); right += _pinPlayer->width() + st::mediaPlayerPanelVolumeSkip;
_volumeController->moveToRight(right, st::mediaPlayerPanelVolumeTop); right += _volumeController->width() + st::mediaPlayerPanelVolumeToggleSkip;
_volumeToggle->moveToRight(right, top);
updatePlayPrevNextPositions();
}
void CoverWidget::paintEvent(QPaintEvent *e) {
Painter p(this);
p.fillRect(e->rect(), st::windowBg);
}
void CoverWidget::mouseMoveEvent(QMouseEvent *e) {
auto closeAreaLeft = st::mediaPlayerPanelPadding + _nameLabel->width();
auto closeAreaHeight = _nameLabel->y() + _nameLabel->height();
auto closeArea = myrtlrect(closeAreaLeft, 0, width() - closeAreaLeft, closeAreaHeight);
auto closeVisible = closeArea.contains(e->pos());
setCloseVisible(closeVisible);
}
void CoverWidget::leaveEventHook(QEvent *e) {
setCloseVisible(false);
}
void CoverWidget::setCloseVisible(bool visible) {
if (visible == _close->isHidden()) {
_close->setVisible(visible);
_timeLabel->setVisible(!visible);
}
}
void CoverWidget::updatePlayPrevNextPositions() {
auto left = st::mediaPlayerPanelPlayLeft;
auto top = st::mediaPlayerPanelPlayTop;
if (_previousTrack) {
_previousTrack->moveToLeft(left, top); left += _previousTrack->width() + st::mediaPlayerPanelPlaySkip;
_playPause->moveToLeft(left, top); left += _playPause->width() + st::mediaPlayerPanelPlaySkip;
_nextTrack->moveToLeft(left, top);
} else {
_playPause->moveToLeft(left, top);
}
}
void CoverWidget::updateLabelPositions() {
_nameLabel->moveToLeft(st::mediaPlayerPanelPadding, st::mediaPlayerPanelNameTop - st::mediaPlayerName.style.font->ascent);
_timeLabel->moveToRight(st::mediaPlayerPanelPadding, st::mediaPlayerPanelNameTop - st::mediaPlayerTime.font->ascent);
}
void CoverWidget::updateRepeatTrackIcon() {
_repeatTrack->setIconOverride(instance()->repeatEnabled(AudioMsgId::Type::Song) ? nullptr : &st::mediaPlayerRepeatInactiveIcon);
}
void CoverWidget::handleSongUpdate(const TrackState &state) {
if (!state.id.audio() || !state.id.audio()->isAudioFile()) {
return;
}
if (state.id.audio()->loading()) {
_playbackProgress->updateLoadingState(state.id.audio()->progress());
} else {
_playbackProgress->updateState(state);
}
auto stopped = IsStoppedOrStopping(state.state);
auto showPause = ShowPauseIcon(state.state);
if (instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
auto buttonState = [audio = state.id.audio(), showPause] {
if (audio->loading()) {
return ButtonState::Cancel;
} else if (showPause) {
return ButtonState::Pause;
}
return ButtonState::Play;
};
_playPause->setState(buttonState());
updateTimeText(state);
}
void CoverWidget::updateTimeText(const TrackState &state) {
QString time;
qint64 position = 0, length = 0, display = 0;
auto frequency = state.frequency;
if (!IsStoppedOrStopping(state.state)) {
display = position = state.position;
length = state.length;
} else if (const auto songData = state.id.audio()->song()) {
length = state.length ? state.length : (songData->duration * frequency);
}
_lastDurationMs = (state.length * 1000LL) / frequency;
if (state.id.audio()->loading()) {
_time = QString::number(qRound(state.id.audio()->progress() * 100)) + '%';
_playbackSlider->setDisabled(true);
} else {
display = display / frequency;
_time = formatDurationText(display);
_playbackSlider->setDisabled(false);
}
if (_seekPositionMs < 0) {
updateTimeLabel();
}
}
void CoverWidget::updateTimeLabel() {
auto timeLabelWidth = _timeLabel->width();
if (_seekPositionMs >= 0) {
auto playAlready = _seekPositionMs / 1000LL;
_timeLabel->setText(formatDurationText(playAlready));
} else {
_timeLabel->setText(_time);
}
if (timeLabelWidth != _timeLabel->width()) {
_nameLabel->resizeToWidth(width() - 2 * (st::mediaPlayerPanelPadding) - _timeLabel->width() - st::normalFont->spacew);
updateLabelPositions();
}
}
void CoverWidget::handleSongChange() {
const auto current = instance()->current(AudioMsgId::Type::Song);
const auto document = current.audio();
if (!current || !document) {
return;
}
TextWithEntities textWithEntities;
const auto song = document ? document->song() : nullptr;
if (!song) {
textWithEntities.text = document->filename().isEmpty()
? qsl("Unknown Track")
: document->filename();
} else if (song->performer.isEmpty()) {
textWithEntities.text = song->title.isEmpty()
? (document->filename().isEmpty()
? qsl("Unknown Track")
: document->filename())
: song->title;
} else {
auto title = song->title.isEmpty()
? qsl("Unknown Track")
: TextUtilities::Clean(song->title);
textWithEntities.text = song->performer + QString::fromUtf8(" \xe2\x80\x93 ") + title;
textWithEntities.entities.append({ EntityInTextBold, 0, song->performer.size(), QString() });
}
_nameLabel->setMarkedText(textWithEntities);
handlePlaylistUpdate();
}
void CoverWidget::handlePlaylistUpdate() {
const auto type = AudioMsgId::Type::Song;
const auto previousEnabled = instance()->previousAvailable(type);
const auto nextEnabled = instance()->nextAvailable(type);
if (!previousEnabled && !nextEnabled) {
destroyPrevNextButtons();
} else {
createPrevNextButtons();
_previousTrack->setIconOverride(previousEnabled ? nullptr : &st::mediaPlayerPanelPreviousDisabledIcon);
_previousTrack->setCursor(previousEnabled ? style::cur_pointer : style::cur_default);
_nextTrack->setIconOverride(nextEnabled ? nullptr : &st::mediaPlayerPanelNextDisabledIcon);
_nextTrack->setCursor(nextEnabled ? style::cur_pointer : style::cur_default);
}
}
void CoverWidget::createPrevNextButtons() {
if (!_previousTrack) {
_previousTrack.create(this, st::mediaPlayerPanelPreviousButton);
_previousTrack->show();
_previousTrack->setClickedCallback([=]() {
instance()->previous();
});
_nextTrack.create(this, st::mediaPlayerPanelNextButton);
_nextTrack->show();
_nextTrack->setClickedCallback([=]() {
instance()->next();
});
updatePlayPrevNextPositions();
}
}
void CoverWidget::destroyPrevNextButtons() {
if (_previousTrack) {
_previousTrack.destroy();
_nextTrack.destroy();
updatePlayPrevNextPositions();
}
}
void CoverWidget::updateVolumeToggleIcon() {
auto icon = []() -> const style::icon * {
auto volume = Global::SongVolume();
if (volume > 0) {
if (volume < 1 / 3.) {
return &st::mediaPlayerVolumeIcon1;
} else if (volume < 2 / 3.) {
return &st::mediaPlayerVolumeIcon2;
}
return &st::mediaPlayerVolumeIcon3;
}
return nullptr;
};
_volumeToggle->setIconOverride(icon());
}
} // namespace Player
} // namespace Media

View File

@ -1,86 +0,0 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/rp_widget.h"
class AudioMsgId;
namespace Ui {
class FlatLabel;
class LabelSimple;
class IconButton;
class MediaSlider;
} // namespace Ui
namespace Media {
namespace View {
class PlaybackProgress;
} // namespace View
namespace Player {
class VolumeController;
struct TrackState;
class CoverWidget : public Ui::RpWidget, private base::Subscriber {
public:
CoverWidget(QWidget *parent);
using ButtonCallback = Fn<void()>;
void setPinCallback(ButtonCallback &&callback);
void setCloseCallback(ButtonCallback &&callback);
protected:
void resizeEvent(QResizeEvent *e) override;
void paintEvent(QPaintEvent *e) override;
void mouseMoveEvent(QMouseEvent *e) override;
void leaveEventHook(QEvent *e) override;
private:
void setCloseVisible(bool visible);
void handleSeekProgress(float64 progress);
void handleSeekFinished(float64 progress);
void updatePlayPrevNextPositions();
void updateLabelPositions();
void updateRepeatTrackIcon();
void createPrevNextButtons();
void destroyPrevNextButtons();
void updateVolumeToggleIcon();
void handleSongUpdate(const TrackState &state);
void handleSongChange();
void handlePlaylistUpdate();
void updateTimeText(const TrackState &state);
void updateTimeLabel();
crl::time _seekPositionMs = -1;
crl::time _lastDurationMs = 0;
QString _time;
class PlayButton;
object_ptr<Ui::FlatLabel> _nameLabel;
object_ptr<Ui::LabelSimple> _timeLabel;
object_ptr<Ui::IconButton> _close;
object_ptr<Ui::MediaSlider> _playbackSlider;
std::unique_ptr<View::PlaybackProgress> _playbackProgress;
object_ptr<Ui::IconButton> _previousTrack = { nullptr };
object_ptr<PlayButton> _playPause;
object_ptr<Ui::IconButton> _nextTrack = { nullptr };
object_ptr<Ui::IconButton> _volumeToggle;
object_ptr<VolumeController> _volumeController;
object_ptr<Ui::IconButton> _pinPlayer;
object_ptr<Ui::IconButton> _repeatTrack;
};
} // namespace Player
} // namespace Media

View File

@ -11,6 +11,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_session.h"
#include "media/audio/media_audio.h"
#include "media/audio/media_audio_capture.h"
#include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_loader.h"
#include "calls/calls_instance.h"
#include "history/history.h"
#include "history/history_item.h"
@ -50,6 +52,34 @@ void finish(not_null<Audio::Instance*> instance) {
Audio::Finish(instance);
}
struct Instance::Streamed {
Streamed(
AudioMsgId id,
not_null<::Data::Session*> owner,
std::unique_ptr<Streaming::Loader> loader);
AudioMsgId id;
Streaming::Player player;
Streaming::Information info;
};
Instance::Streamed::Streamed(
AudioMsgId id,
not_null<::Data::Session*> owner,
std::unique_ptr<Streaming::Loader> loader)
: id(id)
, player(owner, std::move(loader)) {
}
Instance::Data::Data(AudioMsgId::Type type, SharedMediaType overview)
: type(type)
, overview(overview) {
}
Instance::Data::Data(Data &&other) = default;
Instance::Data &Instance::Data::operator=(Data &&other) = default;
Instance::Data::~Data() = default;
Instance::Instance()
: _songData(AudioMsgId::Type::Song, SharedMediaType::MusicFile)
, _voiceData(AudioMsgId::Type::Voice, SharedMediaType::RoundVoiceFile) {
@ -69,8 +99,6 @@ Instance::Instance()
resumeOnCall(AudioMsgId::Type::Song);
}
});
} else {
handleLogout();
}
};
subscribe(
@ -81,10 +109,12 @@ Instance::Instance()
setupShortcuts();
}
Instance::~Instance() = default;
AudioMsgId::Type Instance::getActiveType() const {
auto voiceData = getData(AudioMsgId::Type::Voice);
if (voiceData->current) {
auto state = mixer()->currentState(voiceData->type);
const auto state = getState(voiceData->type);
if (voiceData->current == state.id && !IsStoppedOrStopping(state.state)) {
return voiceData->type;
}
@ -99,26 +129,40 @@ void Instance::handleSongUpdate(const AudioMsgId &audioId) {
}
void Instance::setCurrent(const AudioMsgId &audioId) {
if (auto data = getData(audioId.type())) {
if (data->current != audioId) {
data->current = audioId;
data->isPlaying = false;
auto history = data->history;
auto migrated = data->migrated;
auto item = data->current
? App::histItemById(data->current.contextId())
: nullptr;
if (item) {
data->history = item->history()->migrateToOrMe();
data->migrated = data->history->migrateFrom();
} else {
data->history = nullptr;
data->migrated = nullptr;
}
_trackChangedNotifier.notify(data->type, true);
refreshPlaylist(data);
if (const auto data = getData(audioId.type())) {
if (data->current == audioId) {
return;
}
const auto trackChanged = (data->current.audio() != audioId.audio())
|| (data->current.contextId() != audioId.contextId());
data->current = audioId;
if (!trackChanged) {
return;
}
const auto streamedId = data->streamed
? data->streamed->id
: AudioMsgId();
if (streamedId.audio() != audioId.audio()
|| streamedId.contextId() != audioId.contextId()) {
data->streamed = nullptr;
}
data->current = audioId;
data->isPlaying = false;
auto history = data->history;
auto migrated = data->migrated;
auto item = data->current
? App::histItemById(data->current.contextId())
: nullptr;
if (item) {
data->history = item->history()->migrateToOrMe();
data->migrated = data->history->migrateFrom();
} else {
data->history = nullptr;
data->migrated = nullptr;
}
_trackChangedNotifier.notify(data->type, true);
refreshPlaylist(data);
}
}
@ -241,12 +285,6 @@ bool Instance::moveInPlaylist(
|| document->isVoiceMessage()
|| document->isVideoMessage()) {
play(AudioMsgId(document, item->fullId()));
} else {
//DocumentOpenClickHandler::Open(
// item->fullId(),
// document,
// item,
// ActionOnLoadPlayInline);
}
return true;
}
@ -284,19 +322,20 @@ Instance *instance() {
}
void Instance::play(AudioMsgId::Type type) {
auto state = mixer()->currentState(type);
if (state.id) {
if (IsStopped(state.state)) {
play(state.id);
} else {
mixer()->resume(state.id);
}
} else if (auto data = getData(type)) {
if (data->current) {
if (const auto data = getData(type)) {
const auto state = getState(type);
if (state.id) {
if (IsStopped(state.state)) {
play(state.id);
} else if (data->streamed) {
data->streamed->player.resume();
emitUpdate(type);
} else {
mixer()->resume(state.id);
}
} else if (data->current) {
play(data->current);
}
}
if (const auto data = getData(type)) {
data->resumeOnCallEnd = false;
}
}
@ -306,7 +345,13 @@ void Instance::play(const AudioMsgId &audioId) {
if (!audioId || !document) {
return;
}
if (document->isAudioFile() || document->isVoiceMessage()) {
if (document->isAudioFile()) {
auto loader = document->createStreamingLoader(audioId.contextId());
if (!loader) {
return;
}
playStreamed(audioId, std::move(loader));
} else if (document->isVoiceMessage()) {
mixer()->play(audioId);
setCurrent(audioId);
if (document->loading()) {
@ -322,45 +367,115 @@ void Instance::play(const AudioMsgId &audioId) {
}
}
void Instance::playPause(const AudioMsgId &audioId) {
const auto now = current(audioId.type());
if (now.audio() == audioId.audio()
&& now.contextId() == audioId.contextId()) {
playPause(audioId.type());
} else {
play(audioId);
}
}
void Instance::playStreamed(
const AudioMsgId &audioId,
std::unique_ptr<Streaming::Loader> loader) {
Expects(audioId.audio() != nullptr);
const auto data = getData(audioId.type());
Assert(data != nullptr);
data->streamed = std::make_unique<Streamed>(
audioId,
&audioId.audio()->owner(),
std::move(loader));
data->streamed->player.updates(
) | rpl::start_with_next_error([=](Streaming::Update &&update) {
handleStreamingUpdate(data, std::move(update));
}, [=](Streaming::Error && error) {
handleStreamingError(data, std::move(error));
}, data->streamed->player.lifetime());
data->streamed->player.play(streamingOptions(audioId));
emitUpdate(audioId.type());
}
Streaming::PlaybackOptions Instance::streamingOptions(
const AudioMsgId &audioId,
crl::time position) {
auto result = Streaming::PlaybackOptions();
result.mode = Streaming::Mode::Audio;
result.audioId = audioId;
result.position = position;
return result;
}
void Instance::pause(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type);
if (state.id) {
mixer()->pause(state.id);
if (const auto data = getData(type)) {
if (data->streamed) {
data->streamed->player.pause();
emitUpdate(type);
} else {
const auto state = getState(type);
if (state.id) {
mixer()->pause(state.id);
}
}
}
}
void Instance::stop(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type);
if (state.id) {
mixer()->stop(state.id);
}
if (const auto data = getData(type)) {
if (data->streamed) {
data->streamed = nullptr;
} else {
const auto state = getState(type);
if (state.id) {
mixer()->stop(state.id);
}
}
data->resumeOnCallEnd = false;
}
}
void Instance::playPause(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type);
if (state.id) {
if (IsStopped(state.state)) {
play(state.id);
} else if (IsPaused(state.state) || state.state == State::Pausing) {
mixer()->resume(state.id);
} else {
mixer()->pause(state.id);
}
} else if (auto data = getData(type)) {
if (data->current) {
play(data->current);
}
}
if (const auto data = getData(type)) {
if (data->streamed) {
if (data->streamed->player.finished()) {
auto options = Streaming::PlaybackOptions();
options.mode = Streaming::Mode::Audio;
options.audioId = data->streamed->id;
data->streamed->player.play(options);
} else if (data->streamed->player.paused()) {
data->streamed->player.resume();
} else {
data->streamed->player.pause();
}
emitUpdate(type);
} else {
const auto state = getState(type);
if (state.id) {
if (IsStopped(state.state)) {
play(state.id);
} else if (IsPaused(state.state) || state.state == State::Pausing) {
mixer()->resume(state.id);
} else {
mixer()->pause(state.id);
}
} else if (auto data = getData(type)) {
if (data->current) {
play(data->current);
}
}
}
data->resumeOnCallEnd = false;
}
}
void Instance::pauseOnCall(AudioMsgId::Type type) {
const auto state = mixer()->currentState(type);
const auto state = getState(type);
if (!state.id
|| IsStopped(state.state)
|| IsPaused(state.state)
@ -401,11 +516,15 @@ void Instance::playPauseCancelClicked(AudioMsgId::Type type) {
return;
}
auto state = mixer()->currentState(type);
auto stopped = IsStoppedOrStopping(state.state);
auto showPause = ShowPauseIcon(state.state);
auto audio = state.id.audio();
if (audio && audio->loading()) {
const auto data = getData(type);
if (!data) {
return;
}
const auto state = getState(type);
const auto stopped = IsStoppedOrStopping(state.state);
const auto showPause = ShowPauseIcon(state.state);
const auto audio = state.id.audio();
if (audio && audio->loading() && !data->streamed) {
audio->cancel();
} else if (showPause) {
pause(type);
@ -419,36 +538,69 @@ void Instance::startSeeking(AudioMsgId::Type type) {
data->seeking = data->current;
}
pause(type);
emitUpdate(type, [](const AudioMsgId &playing) { return true; });
emitUpdate(type);
}
void Instance::stopSeeking(AudioMsgId::Type type) {
if (auto data = getData(type)) {
void Instance::finishSeeking(AudioMsgId::Type type, float64 progress) {
if (const auto data = getData(type)) {
if (data->streamed) {
const auto duration = data->streamed->info.audio.state.duration;
if (duration != kTimeUnknown) {
const auto position = crl::time(std::round(
std::clamp(progress, 0., 1.) * duration));
data->streamed->player.play(streamingOptions(
data->streamed->id,
position));
emitUpdate(type);
}
} else {
const auto state = getState(type);
if (state.id && state.length && state.frequency) {
mixer()->seek(type, qRound(progress * state.length * 1000. / state.frequency));
}
}
}
cancelSeeking(type);
}
void Instance::cancelSeeking(AudioMsgId::Type type) {
if (const auto data = getData(type)) {
data->seeking = AudioMsgId();
}
emitUpdate(type, [](const AudioMsgId &playing) { return true; });
emitUpdate(type);
}
void Instance::documentLoadProgress(DocumentData *document) {
const auto type = document->isAudioFile()
? AudioMsgId::Type::Song
: AudioMsgId::Type::Voice;
emitUpdate(type, [document](const AudioMsgId &audioId) {
emitUpdate(type, [&](const AudioMsgId &audioId) {
return (audioId.audio() == document);
});
}
void Instance::emitUpdate(AudioMsgId::Type type) {
emitUpdate(type, [](const AudioMsgId &playing) { return true; });
}
TrackState Instance::getState(AudioMsgId::Type type) const {
if (const auto data = getData(type)) {
if (data->streamed) {
return data->streamed->player.prepareLegacyState();
}
}
return mixer()->currentState(type);
}
template <typename CheckCallback>
void Instance::emitUpdate(AudioMsgId::Type type, CheckCallback check) {
auto state = mixer()->currentState(type);
if (!state.id || !check(state.id)) {
return;
}
setCurrent(state.id);
_updatedNotifier.notify(state, true);
if (auto data = getData(type)) {
if (const auto data = getData(type)) {
const auto state = getState(type);
if (!state.id || !check(state.id)) {
return;
}
setCurrent(state.id);
_updatedNotifier.notify(state, true);
if (data->isPlaying && state.state == State::StoppedAtEnd) {
if (data->repeatEnabled) {
play(data->current);
@ -467,25 +619,25 @@ void Instance::emitUpdate(AudioMsgId::Type type, CheckCallback check) {
}
void Instance::preloadNext(not_null<Data*> data) {
if (!data->current || !data->playlistSlice || !data->playlistIndex) {
return;
}
const auto nextIndex = *data->playlistIndex + 1;
if (const auto item = itemByIndex(data, nextIndex)) {
if (const auto media = item->media()) {
if (const auto document = media->document()) {
const auto isLoaded = document->loaded(
DocumentData::FilePathResolveSaveFromDataSilent);
if (!isLoaded) {
DocumentOpenClickHandler::Open(
item->fullId(),
document,
item,
ActionOnLoadNone);
}
}
}
}
//if (!data->current || !data->playlistSlice || !data->playlistIndex) {
// return;
//}
//const auto nextIndex = *data->playlistIndex + 1;
//if (const auto item = itemByIndex(data, nextIndex)) {
// if (const auto media = item->media()) {
// if (const auto document = media->document()) {
// const auto isLoaded = document->loaded(
// DocumentData::FilePathResolveSaveFromDataSilent);
// if (!isLoaded) {
// DocumentOpenClickHandler::Open(
// item->fullId(),
// document,
// item,
// ActionOnLoadNone);
// }
// }
// }
//}
}
void Instance::handleLogout() {
@ -495,7 +647,6 @@ void Instance::handleLogout() {
};
reset(AudioMsgId::Type::Voice);
reset(AudioMsgId::Type::Song);
_usePanelPlayer.notify(false, true);
}
void Instance::setupShortcuts() {
@ -529,5 +680,37 @@ void Instance::setupShortcuts() {
}, _lifetime);
}
void Instance::handleStreamingUpdate(
not_null<Data*> data,
Streaming::Update &&update) {
using namespace Streaming;
update.data.match([&](Information & update) {
data->streamed->info = std::move(update);
emitUpdate(data->type);
}, [&](PreloadedVideo &update) {
}, [&](UpdateVideo &update) {
}, [&](PreloadedAudio & update) {
data->streamed->info.audio.state.receivedTill = update.till;
//emitUpdate(data->type, [](AudioMsgId) { return true; });
}, [&](UpdateAudio &update) {
data->streamed->info.audio.state.position = update.position;
emitUpdate(data->type);
}, [&](WaitingForData) {
}, [&](MutedByOther) {
}, [&](Finished) {
const auto finishTrack = [](Media::Streaming::TrackState &state) {
state.position = state.receivedTill = state.duration;
};
finishTrack(data->streamed->info.audio.state);
emitUpdate(data->type);
});
}
void Instance::handleStreamingError(
not_null<Data*> data,
Streaming::Error &&error) {
}
} // namespace Player
} // namespace Media

View File

@ -15,7 +15,15 @@ namespace Media {
namespace Audio {
class Instance;
} // namespace Audio
namespace Streaming {
class Loader;
struct PlaybackOptions;
struct Update;
struct Error;
} // namespace Streaming
} // namespace Media
namespace Media {
namespace Player {
void start(not_null<Audio::Instance*> instance);
@ -59,6 +67,9 @@ public:
void playPauseCancelClicked(AudioMsgId::Type type);
void play(const AudioMsgId &audioId);
void playPause(const AudioMsgId &audioId);
TrackState getState(AudioMsgId::Type type) const;
AudioMsgId current(AudioMsgId::Type type) const {
if (auto data = getData(type)) {
return data->current;
@ -86,7 +97,8 @@ public:
return false;
}
void startSeeking(AudioMsgId::Type type);
void stopSeeking(AudioMsgId::Type type);
void finishSeeking(AudioMsgId::Type type, float64 progress);
void cancelSeeking(AudioMsgId::Type type);
bool nextAvailable(AudioMsgId::Type type) const;
bool previousAvailable(AudioMsgId::Type type) const;
@ -99,12 +111,6 @@ public:
base::Observable<Switch> &switchToNextNotifier() {
return _switchToNextNotifier;
}
base::Observable<bool> &usePanelPlayer() {
return _usePanelPlayer;
}
base::Observable<bool> &titleButtonOver() {
return _titleButtonOver;
}
base::Observable<bool> &playerWidgetOver() {
return _playerWidgetOver;
}
@ -125,21 +131,17 @@ public:
void documentLoadProgress(DocumentData *document);
void clear();
void handleLogout();
private:
Instance();
friend void start(not_null<Audio::Instance*> instance);
void setupShortcuts();
using SharedMediaType = Storage::SharedMediaType;
using SliceKey = SparseIdsMergedSlice::Key;
struct Streamed;
struct Data {
Data(AudioMsgId::Type type, SharedMediaType overview)
: type(type)
, overview(overview) {
}
Data(AudioMsgId::Type type, SharedMediaType overview);
Data(Data &&other);
Data &operator=(Data &&other);
~Data();
AudioMsgId::Type type;
Storage::SharedMediaType overview;
@ -156,8 +158,23 @@ private:
bool repeatEnabled = false;
bool isPlaying = false;
bool resumeOnCallEnd = false;
std::unique_ptr<Streamed> streamed;
};
Instance();
~Instance();
friend void start(not_null<Audio::Instance*> instance);
friend void finish(not_null<Audio::Instance*> instance);
void setupShortcuts();
void playStreamed(
const AudioMsgId &audioId,
std::unique_ptr<Streaming::Loader> loader);
Streaming::PlaybackOptions streamingOptions(
const AudioMsgId &audioId,
crl::time position = 0);
// Observed notifications.
void handleSongUpdate(const AudioMsgId &audioId);
@ -173,8 +190,15 @@ private:
bool moveInPlaylist(not_null<Data*> data, int delta, bool autonext);
void preloadNext(not_null<Data*> data);
HistoryItem *itemByIndex(not_null<Data*> data, int index);
void handleLogout();
void handleStreamingUpdate(
not_null<Data*> data,
Streaming::Update &&update);
void handleStreamingError(
not_null<Data*> data,
Streaming::Error &&error);
void emitUpdate(AudioMsgId::Type type);
template <typename CheckCallback>
void emitUpdate(AudioMsgId::Type type, CheckCallback check);
@ -200,8 +224,6 @@ private:
Data _voiceData;
base::Observable<Switch> _switchToNextNotifier;
base::Observable<bool> _usePanelPlayer;
base::Observable<bool> _titleButtonOver;
base::Observable<bool> _playerWidgetOver;
base::Observable<TrackState> _updatedNotifier;
base::Observable<AudioMsgId::Type> _tracksFinishedNotifier;

View File

@ -7,7 +7,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/player/media_player_panel.h"
#include "media/player/media_player_cover.h"
#include "media/player/media_player_instance.h"
#include "info/media/info_media_list_widget.h"
#include "history/history.h"
@ -37,11 +36,9 @@ constexpr auto kDelayedHideTimeout = crl::time(3000);
Panel::Panel(
QWidget *parent,
not_null<Window::Controller*> window,
Layout layout)
not_null<Window::Controller*> window)
: RpWidget(parent)
, AbstractController(window)
, _layout(layout)
, _showTimer([this] { startShow(); })
, _hideTimer([this] { startHideChecked(); })
, _scroll(this, st::mediaPlayerScroll) {
@ -68,7 +65,7 @@ void Panel::resizeEvent(QResizeEvent *e) {
}
void Panel::listHeightUpdated(int newHeight) {
if (newHeight > emptyInnerHeight() || _cover) {
if (newHeight > emptyInnerHeight()) {
updateSize();
} else {
_hideTimer.callOnce(0);
@ -79,7 +76,7 @@ bool Panel::contentTooSmall() const {
const auto innerHeight = _scroll->widget()
? _scroll->widget()->height()
: emptyInnerHeight();
return (innerHeight <= emptyInnerHeight() && !_cover);
return (innerHeight <= emptyInnerHeight());
}
int Panel::emptyInnerHeight() const {
@ -100,15 +97,6 @@ bool Panel::preventAutoHide() const {
void Panel::updateControlsGeometry() {
auto scrollTop = contentTop();
auto width = contentWidth();
if (_cover) {
_cover->resizeToWidth(width);
_cover->moveToRight(contentRight(), scrollTop);
scrollTop += _cover->height();
if (_scrollShadow) {
_scrollShadow->resize(width, st::mediaPlayerScrollShadow.extend.bottom());
_scrollShadow->moveToRight(contentRight(), scrollTop);
}
}
auto scrollHeight = qMax(height() - scrollTop - contentBottom() - scrollMarginBottom(), 0);
if (scrollHeight > 0) {
_scroll->setGeometryToRight(contentRight(), scrollTop, width, scrollHeight);
@ -135,9 +123,6 @@ void Panel::scrollPlaylistToCurrentTrack() {
void Panel::updateSize() {
auto width = contentLeft() + st::mediaPlayerPanelWidth + contentRight();
auto height = contentTop();
if (_cover) {
height += _cover->height();
}
auto listHeight = 0;
if (auto widget = _scroll->widget()) {
listHeight = widget->height();
@ -147,9 +132,6 @@ void Panel::updateSize() {
height += scrollHeight + contentBottom();
resize(width, height);
_scroll->setVisible(scrollVisible);
if (_scrollShadow) {
_scrollShadow->setVisible(scrollVisible);
}
}
void Panel::paintEvent(QPaintEvent *e) {
@ -173,10 +155,10 @@ void Panel::paintEvent(QPaintEvent *e) {
// draw shadow
auto shadowedRect = myrtlrect(contentLeft(), contentTop(), contentWidth(), contentHeight());
auto shadowedSides = (rtl() ? RectPart::Right : RectPart::Left) | RectPart::Bottom;
if (_layout != Layout::Full) {
shadowedSides |= (rtl() ? RectPart::Left : RectPart::Right) | RectPart::Top;
}
auto shadowedSides = (rtl() ? RectPart::Right : RectPart::Left)
| RectPart::Bottom
| (rtl() ? RectPart::Left : RectPart::Right)
| RectPart::Top;
Ui::Shadow::paint(p, shadowedRect, width(), st::defaultRoundShadow, shadowedSides);
auto parts = RectPart::Full;
App::roundRect(p, shadowedRect, st::menuBg, MenuCorners, nullptr, parts);
@ -228,13 +210,6 @@ void Panel::hideFromOther() {
void Panel::ensureCreated() {
if (_scroll->widget()) return;
if (_layout == Layout::Full) {
_cover.create(this);
setPinCallback(std::move(_pinCallback));
setCloseCallback(std::move(_closeCallback));
_scrollShadow.create(this, st::mediaPlayerScrollShadow, RectPart::Bottom);
}
_refreshListLifetime = instance()->playlistChanges(
AudioMsgId::Type::Song
) | rpl::start_with_next([this] {
@ -328,7 +303,6 @@ void Panel::refreshList() {
void Panel::performDestroy() {
if (!_scroll->widget()) return;
_cover.destroy();
_scroll->takeWidget<QWidget>().destroy();
_listPeer = _listMigratedPeer = nullptr;
_refreshListLifetime.destroy();
@ -344,20 +318,6 @@ void Panel::performDestroy() {
}
}
void Panel::setPinCallback(ButtonCallback &&callback) {
_pinCallback = std::move(callback);
if (_cover) {
_cover->setPinCallback(ButtonCallback(_pinCallback));
}
}
void Panel::setCloseCallback(ButtonCallback &&callback) {
_closeCallback = std::move(callback);
if (_cover) {
_cover->setCloseCallback(ButtonCallback(_closeCallback));
}
}
Info::Key Panel::key() const {
return Info::Key(_listPeer);
}
@ -444,11 +404,11 @@ int Panel::contentLeft() const {
}
int Panel::contentTop() const {
return (_layout == Layout::Full) ? 0 : st::mediaPlayerPanelMarginLeft;
return st::mediaPlayerPanelMarginLeft;
}
int Panel::contentRight() const {
return (_layout == Layout::Full) ? 0 : st::mediaPlayerPanelMarginLeft;
return st::mediaPlayerPanelMarginLeft;
}
int Panel::contentBottom() const {

View File

@ -27,14 +27,9 @@ class CoverWidget;
class Panel : public Ui::RpWidget, private Info::AbstractController {
public:
enum class Layout {
Full,
OnlyPlaylist,
};
Panel(
QWidget *parent,
not_null<Window::Controller*> controller,
Layout layout);
not_null<Window::Controller*> controller);
bool overlaps(const QRect &globalRect);
@ -43,10 +38,6 @@ public:
void showFromOther();
void hideFromOther();
using ButtonCallback = Fn<void()>;
void setPinCallback(ButtonCallback &&callback);
void setCloseCallback(ButtonCallback &&callback);
int bestPositionFor(int left) const;
protected:
@ -96,7 +87,6 @@ private:
return static_cast<Info::AbstractController*>(this);
}
Layout _layout;
bool _hiding = false;
QPixmap _cache;
@ -107,10 +97,7 @@ private:
base::Timer _showTimer;
base::Timer _hideTimer;
ButtonCallback _pinCallback, _closeCallback;
object_ptr<CoverWidget> _cover = { nullptr };
object_ptr<Ui::ScrollArea> _scroll;
object_ptr<Ui::Shadow> _scrollShadow = { nullptr };
rpl::lifetime _refreshListLifetime;
PeerData *_listPeer = nullptr;

View File

@ -156,8 +156,8 @@ Widget::Widget(QWidget *parent) : RpWidget(parent)
subscribe(instance()->tracksFinishedNotifier(), [this](AudioMsgId::Type type) {
if (type == AudioMsgId::Type::Voice) {
_voiceIsActive = false;
auto currentSong = instance()->current(AudioMsgId::Type::Song);
auto songState = mixer()->currentState(AudioMsgId::Type::Song);
const auto currentSong = instance()->current(AudioMsgId::Type::Song);
const auto songState = instance()->getState(AudioMsgId::Type::Song);
if (currentSong == songState.id && !IsStoppedOrStopping(songState.state)) {
setType(AudioMsgId::Type::Song);
}
@ -191,8 +191,8 @@ void Widget::setCloseCallback(Fn<void()> callback) {
void Widget::stopAndClose() {
_voiceIsActive = false;
if (_type == AudioMsgId::Type::Voice) {
auto songData = instance()->current(AudioMsgId::Type::Song);
auto songState = mixer()->currentState(AudioMsgId::Type::Song);
const auto songData = instance()->current(AudioMsgId::Type::Song);
const auto songState = instance()->getState(AudioMsgId::Type::Song);
if (songData == songState.id && !IsStoppedOrStopping(songState.state)) {
instance()->stop(AudioMsgId::Type::Voice);
return;
@ -248,12 +248,7 @@ void Widget::handleSeekFinished(float64 progress) {
auto positionMs = snap(static_cast<crl::time>(progress * _lastDurationMs), 0LL, _lastDurationMs);
_seekPositionMs = -1;
auto state = mixer()->currentState(_type);
if (state.id && state.length && state.frequency) {
mixer()->seek(_type, qRound(progress * state.length * 1000. / state.frequency));
}
instance()->stopSeeking(_type);
instance()->finishSeeking(_type, progress);
}
void Widget::resizeEvent(QResizeEvent *e) {
@ -382,8 +377,8 @@ void Widget::updatePlaybackSpeedIcon() {
void Widget::checkForTypeChange() {
auto hasActiveType = [](AudioMsgId::Type type) {
auto current = instance()->current(type);
auto state = mixer()->currentState(type);
const auto current = instance()->current(type);
const auto state = instance()->getState(type);
return (current == state.id && !IsStoppedOrStopping(state.state));
};
if (hasActiveType(AudioMsgId::Type::Voice)) {
@ -410,7 +405,7 @@ void Widget::setType(AudioMsgId::Type type) {
}
updateLabelsGeometry();
handleSongChange();
handleSongUpdate(mixer()->currentState(_type));
handleSongUpdate(instance()->getState(_type));
updateOverLabelsState(_labelsOver);
_playlistChangesLifetime = instance()->playlistChanges(
_type

View File

@ -29,7 +29,7 @@ AudioTrack::AudioTrack(
, _playPosition(options.position) {
Expects(_ready != nullptr);
Expects(_error != nullptr);
Expects(_audioId.playId() != 0);
Expects(_audioId.externalPlayId() != 0);
}
int AudioTrack::streamIndex() const {
@ -66,11 +66,7 @@ bool AudioTrack::tryReadFirstFrame(Packet &&packet) {
}
if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) {
if (!_initialSkippingFrame) {
return false;
}
// Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
return false;
@ -83,10 +79,6 @@ bool AudioTrack::tryReadFirstFrame(Packet &&packet) {
} else if (_startedPosition < _options.position) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = MakeFramePointer();
}
return true;
} else {
return processFirstFrame();
@ -111,12 +103,13 @@ bool AudioTrack::fillStateFromFrame() {
void AudioTrack::mixerInit() {
Expects(!initialized());
auto data = std::make_unique<VideoSoundData>();
data->frame = _stream.frame.release();
data->context = _stream.codec.release();
auto data = std::make_unique<ExternalSoundData>();
data->frame = std::move(_stream.frame);
data->codec = std::move(_stream.codec);
data->frequency = _stream.frequency;
data->length = (_stream.duration * data->frequency) / 1000LL;
data->speed = _options.speed;
Media::Player::mixer()->play(
_audioId,
std::move(data),
@ -136,15 +129,14 @@ void AudioTrack::callReady() {
}
void AudioTrack::mixerEnqueue(Packet &&packet) {
Media::Player::mixer()->feedFromVideo({
&packet.fields(),
_audioId
Media::Player::mixer()->feedFromExternal({
_audioId,
std::move(packet)
});
packet.release();
}
void AudioTrack::mixerForceToBuffer() {
Media::Player::mixer()->forceToBufferVideo(_audioId);
Media::Player::mixer()->forceToBufferExternal(_audioId);
}
void AudioTrack::pause(crl::time time) {
@ -161,7 +153,7 @@ void AudioTrack::resume(crl::time time) {
void AudioTrack::setSpeed(float64 speed) {
_options.speed = speed;
Media::Player::mixer()->setSpeedFromVideo(_audioId, speed);
Media::Player::mixer()->setSpeedFromExternal(_audioId, speed);
}
rpl::producer<> AudioTrack::waitingForData() const {
@ -178,8 +170,8 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
if (id != _audioId) {
return;
}
const auto type = AudioMsgId::Type::Video;
const auto state = Media::Player::mixer()->currentState(type);
const auto state = Media::Player::mixer()->currentState(
_audioId.type());
if (state.id != _audioId) {
// #TODO streaming later muted by other
return;
@ -212,7 +204,8 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
}
AudioTrack::~AudioTrack() {
if (_audioId.playId()) {
if (_audioId.externalPlayId()) {
LOG(("mixer()->stop with %1").arg(_audioId.externalPlayId()));
Media::Player::mixer()->stop(_audioId);
}
}

View File

@ -35,6 +35,7 @@ struct PlaybackOptions {
Mode mode = Mode::Both;
crl::time position = 0;
float64 speed = 1.; // Valid values between 0.5 and 2.
AudioMsgId audioId;
bool syncVideoByAudio = true;
bool dropStaleFrames = true;
};

View File

@ -12,6 +12,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_audio_track.h"
#include "media/streaming/media_streaming_video_track.h"
#include "media/audio/media_audio.h" // for SupportsSpeedControl()
#include "data/data_document.h" // for DocumentData::duration()
namespace Media {
namespace Streaming {
@ -20,6 +21,7 @@ namespace {
constexpr auto kReceivedTillEnd = std::numeric_limits<crl::time>::max();
constexpr auto kBufferFor = 3 * crl::time(1000);
constexpr auto kLoadInAdvanceFor = 64 * crl::time(1000);
constexpr auto kMsFrequency = 1000; // 1000 ms per second.
// If we played for 3 seconds and got stuck it looks like we're loading
// slower than we're playing, so load full file in that case.
@ -132,7 +134,9 @@ void Player::trackReceivedTill(
} else {
state.receivedTill = position;
}
if (!_pauseReading && bothReceivedEnough(kLoadInAdvanceFor)) {
if (!_pauseReading
&& bothReceivedEnough(kLoadInAdvanceFor)
&& !receivedTillEnd()) {
_pauseReading = true;
}
}
@ -150,7 +154,8 @@ void Player::trackPlayedTill(
state.position = position;
_updates.fire({ PlaybackUpdate<Track>{ position } });
}
if (_pauseReading && !bothReceivedEnough(kLoadInAdvanceFor)) {
if (_pauseReading
&& (!bothReceivedEnough(kLoadInAdvanceFor) || receivedTillEnd())) {
_pauseReading = false;
_file->wake();
++wakes;
@ -213,7 +218,14 @@ void Player::fileReady(Stream &&video, Stream &&audio) {
};
const auto mode = _options.mode;
if (audio.codec && (mode == Mode::Audio || mode == Mode::Both)) {
_audioId = AudioMsgId::ForVideo();
if (_options.audioId) {
_audioId = AudioMsgId(
_options.audioId.audio(),
_options.audioId.contextId(),
AudioMsgId::CreateExternalPlayId());
} else {
_audioId = AudioMsgId::ForVideo();
}
_audio = std::make_unique<AudioTrack>(
_options,
std::move(audio),
@ -429,6 +441,11 @@ bool Player::bothReceivedEnough(crl::time amount) const {
&& (!_video || trackReceivedEnough(info.video.state, amount));
}
bool Player::receivedTillEnd() const {
return (!_video || FullTrackReceived(_information.video.state))
&& (!_audio || FullTrackReceived(_information.audio.state));
}
void Player::checkResumeFromWaitingForData() {
if (_pausedByWaitingForData && bothReceivedEnough(kBufferFor)) {
_pausedByWaitingForData = false;
@ -446,8 +463,7 @@ void Player::start() {
_audio ? _audio->waitingForData() : rpl::never(),
_video ? _video->waitingForData() : rpl::never()
) | rpl::filter([=] {
return !FullTrackReceived(_information.video.state)
|| !FullTrackReceived(_information.audio.state);
return !receivedTillEnd();
}) | rpl::start_with_next([=] {
_pausedByWaitingForData = true;
updatePausedState();
@ -566,6 +582,38 @@ QImage Player::frame(const FrameRequest &request) const {
return _video->frame(request);
}
Media::Player::TrackState Player::prepareLegacyState() const {
using namespace Media::Player;
auto result = Media::Player::TrackState();
result.id = _audioId.externalPlayId() ? _audioId : _options.audioId;
result.state = finished()
? State::StoppedAtEnd
: paused()
? State::Paused
: State::Playing;
result.position = std::max(
_information.audio.state.position,
_information.video.state.position);
if (result.position == kTimeUnknown) {
result.position = _options.position;
}
result.length = std::max(
_information.audio.state.duration,
_information.video.state.duration);
if (result.length == kTimeUnknown && _options.audioId.audio()) {
const auto document = _options.audioId.audio();
const auto duration = document->song()
? document->song()->duration
: document->duration();
if (duration > 0) {
result.length = duration * crl::time(1000);
}
}
result.frequency = kMsFrequency;
return result;
}
rpl::lifetime &Player::lifetime() {
return _lifetime;
}

View File

@ -16,6 +16,12 @@ namespace Data {
class Session;
} // namespace Data
namespace Media {
namespace Player {
struct TrackState;
} // namespace Player
} // namespace Media
namespace Media {
namespace Streaming {
@ -54,6 +60,8 @@ public:
[[nodiscard]] QImage frame(const FrameRequest &request) const;
[[nodiscard]] Media::Player::TrackState prepareLegacyState() const;
[[nodiscard]] rpl::lifetime &lifetime();
~Player();
@ -95,6 +103,7 @@ private:
const TrackState &state,
crl::time amount) const;
[[nodiscard]] bool bothReceivedEnough(crl::time amount) const;
[[nodiscard]] bool receivedTillEnd() const;
void checkResumeFromWaitingForData();
template <typename Track>

View File

@ -379,9 +379,6 @@ auto Reader::Slices::fill(int offset, bytes::span buffer) -> FillResult {
if (cacheNotLoaded(sliceIndex)
&& !(_data[sliceIndex].flags & Flag::LoadingFromCache)) {
_data[sliceIndex].flags |= Flag::LoadingFromCache;
if (sliceIndex == 23) {
int a = 0;
}
result.sliceNumbersFromCache.add(sliceIndex + 1);
}
};

View File

@ -33,12 +33,6 @@ bool IsAlignedImage(const QImage &image) {
&& !(image.bytesPerLine() % kAlignImageBy);
}
void ClearFrameMemory(AVFrame *frame) {
if (frame && frame->data[0]) {
av_frame_unref(frame);
}
}
} // namespace
bool GoodStorageForFrame(const QImage &storage, QSize size) {
@ -112,6 +106,16 @@ FramePointer MakeFramePointer() {
return FramePointer(av_frame_alloc());
}
bool FrameHasData(AVFrame *frame) {
return (frame && frame->data[0] != nullptr);
}
void ClearFrameMemory(AVFrame *frame) {
if (FrameHasData(frame)) {
av_frame_unref(frame);
}
}
void FrameDeleter::operator()(AVFrame *value) {
av_frame_free(&value);
}
@ -288,17 +292,20 @@ bool GoodForRequest(const QImage &image, const FrameRequest &request) {
&& (request.resize == image.size());
}
QImage ConvertFrame(Stream &stream, QSize resize, QImage storage) {
Expects(stream.frame != nullptr);
QImage ConvertFrame(
Stream &stream,
AVFrame *frame,
QSize resize,
QImage storage) {
Expects(frame != nullptr);
const auto frame = stream.frame.get();
const auto frameSize = QSize(frame->width, frame->height);
if (frameSize.isEmpty()) {
LOG(("Streaming Error: Bad frame size %1,%2"
).arg(frameSize.width()
).arg(frameSize.height()));
return QImage();
} else if (!frame->data[0]) {
} else if (!FrameHasData(frame)) {
LOG(("Streaming Error: Bad frame data."));
return QImage();
}
@ -359,6 +366,8 @@ QImage ConvertFrame(Stream &stream, QSize resize, QImage storage) {
return QImage();
}
}
ClearFrameMemory(frame);
return storage;
}

View File

@ -119,13 +119,15 @@ struct CodecDeleter {
void operator()(AVCodecContext *value);
};
using CodecPointer = std::unique_ptr<AVCodecContext, CodecDeleter>;
CodecPointer MakeCodecPointer(not_null<AVStream*> stream);
[[nodiscard]] CodecPointer MakeCodecPointer(not_null<AVStream*> stream);
struct FrameDeleter {
void operator()(AVFrame *value);
};
using FramePointer = std::unique_ptr<AVFrame, FrameDeleter>;
FramePointer MakeFramePointer();
[[nodiscard]] FramePointer MakeFramePointer();
[[nodiscard]] bool FrameHasData(AVFrame *frame);
void ClearFrameMemory(AVFrame *frame);
struct SwsContextDeleter {
QSize resize;
@ -135,7 +137,7 @@ struct SwsContextDeleter {
void operator()(SwsContext *value);
};
using SwsContextPointer = std::unique_ptr<SwsContext, SwsContextDeleter>;
SwsContextPointer MakeSwsContextPointer(
[[nodiscard]] SwsContextPointer MakeSwsContextPointer(
not_null<AVFrame*> frame,
QSize resize,
SwsContextPointer *existing = nullptr);
@ -179,7 +181,8 @@ void LogError(QLatin1String method, AvErrorWrap error);
[[nodiscard]] bool GoodStorageForFrame(const QImage &storage, QSize size);
[[nodiscard]] QImage CreateFrameStorage(QSize size);
[[nodiscard]] QImage ConvertFrame(
Stream& stream,
Stream &stream,
AVFrame *frame,
QSize resize,
QImage storage);
[[nodiscard]] QImage PrepareByRequest(

View File

@ -85,9 +85,6 @@ private:
bool _queued = false;
base::ConcurrentTimer _readFramesTimer;
// For initial frame skipping for an exact seek.
FramePointer _initialSkippingFrame;
};
VideoTrackObject::VideoTrackObject(
@ -190,6 +187,7 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
_error();
return false;
}
std::swap(frame->decoded, _stream.frame);
frame->position = position;
frame->displayed = kTimeUnknown;
return true;
@ -204,6 +202,7 @@ void VideoTrackObject::presentFrameIfNeeded() {
frame->request = _request;
frame->original = ConvertFrame(
_stream,
frame->decoded.get(),
frame->request.resize,
std::move(frame->original));
if (frame->original.isNull()) {
@ -294,11 +293,7 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
auto frame = QImage();
if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) {
if (!_initialSkippingFrame) {
return false;
}
// Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
return false;
@ -311,10 +306,6 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
} else if (_syncTimePoint.trackTime < _options.position) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = MakeFramePointer();
}
return true;
} else {
return processFirstFrame();
@ -322,7 +313,11 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
}
bool VideoTrackObject::processFirstFrame() {
auto frame = ConvertFrame(_stream, QSize(), QImage());
auto frame = ConvertFrame(
_stream,
_stream.frame.get(),
QSize(),
QImage());
if (frame.isNull()) {
return false;
}
@ -387,9 +382,9 @@ TimePoint VideoTrackObject::trackTime() const {
}
Assert(_resumedTime != kTimeUnknown);
if (_options.syncVideoByAudio && _audioId.playId()) {
if (_options.syncVideoByAudio && _audioId.externalPlayId()) {
const auto mixer = Media::Player::mixer();
const auto point = mixer->getVideoSyncTimePoint(_audioId);
const auto point = mixer->getExternalSyncTimePoint(_audioId);
if (point && point.worldTime > _resumedTime) {
_syncTimePoint = point;
}

View File

@ -57,6 +57,7 @@ private:
friend class VideoTrackObject;
struct Frame {
FramePointer decoded = MakeFramePointer();
QImage original;
crl::time position = kTimeUnknown;
crl::time displayed = kTimeUnknown;

View File

@ -45,7 +45,6 @@ namespace Media {
namespace View {
namespace {
constexpr auto kMsFrequency = 1000; // 1000 ms per second.
constexpr auto kPreloadCount = 4;
// Preload X message ids before and after current.
@ -321,30 +320,6 @@ QImage OverlayWidget::videoFrame() const {
: _streamed->info.video.cover;
}
// Current playback position in milliseconds, taking whichever
// track (audio or video) has advanced further; 0 while unknown.
crl::time OverlayWidget::streamedPosition() const {
	Expects(_streamed != nullptr);

	const auto position = std::max(
		_streamed->info.audio.state.position,
		_streamed->info.video.state.position);
	return (position == kTimeUnknown) ? crl::time(0) : position;
}
// Total duration in milliseconds: prefers what the streaming
// tracks report, falling back to the document's stored duration.
crl::time OverlayWidget::streamedDuration() const {
	Expects(_streamed != nullptr);

	const auto fromTracks = std::max(
		_streamed->info.audio.state.duration,
		_streamed->info.video.state.duration);
	if (fromTracks != kTimeUnknown) {
		return fromTracks;
	}
	// Document duration is stored in seconds; convert to ms.
	const auto seconds = _doc->song()
		? _doc->song()->duration
		: _doc->duration();
	return (seconds > 0) ? (seconds * crl::time(1000)) : kTimeUnknown;
}
// True when a document is set and there is something to paint:
// either a static image (_current) or a streamed video frame.
bool OverlayWidget::documentContentShown() const {
	if (!_doc) {
		return false;
	}
	return !_current.isNull() || videoShown();
}
@ -1892,7 +1867,7 @@ void OverlayWidget::initStreaming() {
handleStreamingUpdate(std::move(update));
}, [=](Streaming::Error &&error) {
handleStreamingError(std::move(error));
}, _streamed->controls.lifetime());
}, _streamed->player.lifetime());
restartAtSeekPosition(0);
}
@ -1938,6 +1913,7 @@ void OverlayWidget::handleStreamingUpdate(Streaming::Update &&update) {
}, [&](UpdateVideo &update) {
_streamed->info.video.state.position = update.position;
this->update(contentRect());
Core::App().updateNonIdle();
updatePlaybackState();
}, [&](PreloadedAudio &update) {
_streamed->info.audio.state.receivedTill = update.till;
@ -2134,15 +2110,17 @@ void OverlayWidget::playbackToggleFullScreen() {
void OverlayWidget::updatePlaybackState() {
Expects(_streamed != nullptr);
auto state = Player::TrackState();
state.state = _streamed->player.finished()
? Player::State::StoppedAtEnd
: _streamed->player.paused()
? Player::State::Paused
: Player::State::Playing;
state.position = streamedPosition();
state.length = streamedDuration();
state.frequency = kMsFrequency;
auto state = _streamed->player.prepareLegacyState();
if (state.length == kTimeUnknown) {
const auto duration = _doc->song()
? _doc->song()->duration
: _doc->duration();
if (duration > 0) {
state.length = std::max(
duration * crl::time(1000),
state.position);
}
}
if (state.position != kTimeUnknown && state.length != kTimeUnknown) {
_streamed->controls.updatePlayback(state);
}

View File

@ -282,8 +282,6 @@ private:
[[nodiscard]] QSize videoSize() const;
[[nodiscard]] bool videoIsGifv() const;
[[nodiscard]] QImage videoFrame() const;
[[nodiscard]] crl::time streamedPosition() const;
[[nodiscard]] crl::time streamedDuration() const;
[[nodiscard]] bool documentContentShown() const;
[[nodiscard]] bool documentBubbleShown() const;
void clearStreaming();

View File

@ -849,15 +849,18 @@ bool Voice::updateStatusText() {
statusSize = FileStatusSizeFailed;
} else if (_data->loaded()) {
statusSize = FileStatusSizeLoaded;
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.playId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
} else {
statusSize = FileStatusSizeReady;
}
const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.externalPlayId())
&& !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (statusSize != _status.size()) {
_status.update(statusSize, _data->size, duration(), realDuration);
}
@ -943,7 +946,7 @@ void Document::paint(Painter &p, const QRect &clip, TextSelection selection, con
if (selected) {
p.setBrush(st::msgFileInBgSelected);
} else {
auto over = ClickHandler::showAsActive(loaded ? _openl : (_data->loading() ? _cancell : _openl));
auto over = ClickHandler::showAsActive((_data->loading() || _data->uploading()) ? _cancell : _data->canBePlayed() ? _openl : _openl);
p.setBrush(anim::brush(_st.songIconBg, _st.songOverBg, _a_iconOver.current(context->ms, over ? 1. : 0.)));
}
@ -961,10 +964,10 @@ void Document::paint(Painter &p, const QRect &clip, TextSelection selection, con
auto icon = [&] {
if (showPause) {
return &(selected ? _st.songPauseSelected : _st.songPause);
} else if (loaded) {
return &(selected ? _st.songPlaySelected : _st.songPlay);
} else if (_data->loading()) {
} else if (_data->loading() || _data->uploading()) {
return &(selected ? _st.songCancelSelected : _st.songCancel);
} else if (_data->canBePlayed()) {
return &(selected ? _st.songPlaySelected : _st.songPlay);
}
return &(selected ? _st.songDownloadSelected : _st.songDownload);
}();
@ -1100,10 +1103,10 @@ TextState Document::getState(
_st.songThumbSize,
_width);
if (inner.contains(point)) {
const auto link = loaded
? _openl
: (_data->loading() || _data->uploading())
const auto link = (_data->loading() || _data->uploading())
? _cancell
: _data->canBePlayed()
? _openl
: _openl;
return { parent(), link };
}
@ -1217,23 +1220,23 @@ bool Document::updateStatusText() {
} else if (_data->loading()) {
statusSize = _data->loadOffset();
} else if (_data->loaded()) {
if (_data->isSong()) {
statusSize = FileStatusSizeLoaded;
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(_data, parent()->fullId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (!showPause && (state.id == AudioMsgId(_data, parent()->fullId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
} else {
statusSize = FileStatusSizeLoaded;
}
statusSize = FileStatusSizeLoaded;
} else {
statusSize = FileStatusSizeReady;
}
if (_data->isSong()) {
const auto state = Media::Player::instance()->getState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.externalPlayId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (!showPause && (state.id == AudioMsgId(_data, parent()->fullId(), state.id.externalPlayId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
}
if (statusSize != _status.size()) {
_status.update(statusSize, _data->size, _data->isSong() ? _data->song()->duration : -1, realDuration);
}

View File

@ -440,8 +440,6 @@
<(src_loc)/media/clip/media_clip_reader.h
<(src_loc)/media/player/media_player_button.cpp
<(src_loc)/media/player/media_player_button.h
<(src_loc)/media/player/media_player_cover.cpp
<(src_loc)/media/player/media_player_cover.h
<(src_loc)/media/player/media_player_float.cpp
<(src_loc)/media/player/media_player_float.h
<(src_loc)/media/player/media_player_instance.cpp