Play video messages as Type::Voice.

Use AudioMsgId instead of videoPlayId.
Any audio track can now be a child loader track of some video clip.
Use Type::Voice instead of Type::Video for round video messages.
Video messages play / pause / resume the same way as voice messages.
John Preston 2017-05-18 23:18:59 +03:00
parent 1e6d4d6b41
commit b9119e5ef6
27 changed files with 466 additions and 440 deletions
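The commit message above says that round video message sound is now addressed by an AudioMsgId (of Type::Voice, carrying its own playId) instead of a separate uint64 videoPlayId, so the same mixer play / pause / resume path serves both voice and video messages. The following is a minimal standalone sketch of that idea, not the actual Telegram Desktop API: the AudioMsgId and Mixer types here are simplified stand-ins invented for illustration.

```cpp
// Minimal sketch (hypothetical, simplified): one mixer state per full AudioMsgId,
// where a round video message's audio is just a Voice-type id with a non-zero playId.
#include <cstdint>
#include <iostream>
#include <map>
#include <tuple>

enum class Type { Voice, Song, Video };
enum class State { Stopped, Playing, Paused };

// Simplified stand-in for AudioMsgId: track type + message context + video play id.
struct AudioMsgId {
	Type type;
	std::uint64_t contextId;
	std::uint32_t playId; // non-zero only when the audio is fed from a video clip

	bool operator<(const AudioMsgId &other) const {
		return std::tie(type, contextId, playId)
			< std::tie(other.type, other.contextId, other.playId);
	}
};

// Hypothetical mixer: the same calls drive voice messages and video message sound.
class Mixer {
public:
	void play(const AudioMsgId &id) { _state[id] = State::Playing; }
	void pause(const AudioMsgId &id) {
		const auto it = _state.find(id);
		if (it != _state.end() && it->second == State::Playing) {
			it->second = State::Paused;
		}
	}
	void resume(const AudioMsgId &id) {
		const auto it = _state.find(id);
		if (it != _state.end() && it->second == State::Paused) {
			it->second = State::Playing;
		}
	}
	State state(const AudioMsgId &id) const {
		const auto it = _state.find(id);
		return (it == _state.end()) ? State::Stopped : it->second;
	}

private:
	std::map<AudioMsgId, State> _state;
};

int main() {
	Mixer mixer;
	// A round video message: its sound track is a Voice-type id with its own playId.
	const AudioMsgId roundVideo{Type::Voice, /*contextId=*/42, /*playId=*/7};
	mixer.play(roundVideo);
	mixer.pause(roundVideo);  // paused exactly like a voice message
	mixer.resume(roundVideo);
	std::cout << (mixer.state(roundVideo) == State::Playing) << '\n'; // prints 1
}
```

In the real diff below, this shows up as Mixer::pause / Mixer::resume / Mixer::stop taking a const AudioMsgId& instead of the old videoPlayId-based pauseFromVideo / resumeFromVideo / stopFromVideo entry points.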

View File

@ -276,9 +276,6 @@ mediaPlayerSuppressDuration: 150;
botDescSkip: 8px; botDescSkip: 8px;
suppressAll: 0.2;
suppressSong: 0.05;
inlineResultsLeft: 11px; inlineResultsLeft: 11px;
inlineResultsSkip: 3px; inlineResultsSkip: 3px;
inlineMediaHeight: 96px; inlineMediaHeight: 96px;

View File

@ -145,7 +145,7 @@ void SendFilesBox::prepareGifPreview() {
return _animated; return _animated;
}; };
if (createGifPreview()) { if (createGifPreview()) {
_gifPreview = Media::Clip::MakeReader(FileLocation(_files.front()), QByteArray(), [this](Media::Clip::Notification notification) { _gifPreview = Media::Clip::MakeReader(_files.front(), [this](Media::Clip::Notification notification) {
clipCallback(notification); clipCallback(notification);
}); });
if (_gifPreview) _gifPreview->setAutoplay(); if (_gifPreview) _gifPreview->setAutoplay();
@ -575,7 +575,7 @@ void EditCaptionBox::prepareGifPreview(DocumentData *document) {
}; };
auto createGifPreviewResult = createGifPreview(); // Clang freeze workaround. auto createGifPreviewResult = createGifPreview(); // Clang freeze workaround.
if (createGifPreviewResult) { if (createGifPreviewResult) {
_gifPreview = Media::Clip::MakeReader(document->location(), document->data(), [this](Media::Clip::Notification notification) { _gifPreview = Media::Clip::MakeReader(document, _msgId, [this](Media::Clip::Notification notification) {
clipCallback(notification); clipCallback(notification);
}); });
if (_gifPreview) _gifPreview->setAutoplay(); if (_gifPreview) _gifPreview->setAutoplay();

View File

@ -29,6 +29,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "ui/effects/ripple_animation.h" #include "ui/effects/ripple_animation.h"
#include "storage/file_upload.h" #include "storage/file_upload.h"
#include "auth_session.h" #include "auth_session.h"
#include "media/media_audio.h"
#include "messenger.h" #include "messenger.h"
namespace { namespace {
@ -947,7 +948,7 @@ void HistoryItem::clipCallback(Media::Clip::Notification notification) {
return; return;
} }
auto reader = media ? media->getClipReader() : nullptr; auto reader = media->getClipReader();
if (!reader) { if (!reader) {
return; return;
} }
@ -983,6 +984,32 @@ void HistoryItem::clipCallback(Media::Clip::Notification notification) {
} }
} }
void HistoryItem::audioTrackUpdated() {
auto media = getMedia();
if (!media) {
return;
}
auto reader = media->getClipReader();
if (!reader || reader->mode() != Media::Clip::Reader::Mode::Video) {
return;
}
auto audio = reader->audioMsgId();
auto current = Media::Player::mixer()->currentState(audio.type());
if (current.id != audio || Media::Player::IsStopped(current.state) || current.state == Media::Player::State::Finishing) {
media->stopInline();
} else if (Media::Player::IsPaused(current.state) || current.state == Media::Player::State::Pausing) {
if (!reader->videoPaused()) {
reader->pauseResumeVideo();
}
} else {
if (reader->videoPaused()) {
reader->pauseResumeVideo();
}
}
}
void HistoryItem::recountDisplayDate() { void HistoryItem::recountDisplayDate() {
bool displayingDate = ([this]() { bool displayingDate = ([this]() {
if (isEmpty()) { if (isEmpty()) {

View File

@ -851,6 +851,7 @@ public:
} }
void clipCallback(Media::Clip::Notification notification); void clipCallback(Media::Clip::Notification notification);
void audioTrackUpdated();
~HistoryItem(); ~HistoryItem();

View File

@ -1840,9 +1840,6 @@ int HistoryGif::resizeGetHeight(int width) {
auto roundCorners = (isRound || inWebPage) ? ImageRoundCorner::All : ((isBubbleTop() ? (ImageRoundCorner::TopLeft | ImageRoundCorner::TopRight) : ImageRoundCorner::None) auto roundCorners = (isRound || inWebPage) ? ImageRoundCorner::All : ((isBubbleTop() ? (ImageRoundCorner::TopLeft | ImageRoundCorner::TopRight) : ImageRoundCorner::None)
| ((isBubbleBottom() && _caption.isEmpty()) ? (ImageRoundCorner::BottomLeft | ImageRoundCorner::BottomRight) : ImageRoundCorner::None)); | ((isBubbleBottom() && _caption.isEmpty()) ? (ImageRoundCorner::BottomLeft | ImageRoundCorner::BottomRight) : ImageRoundCorner::None));
_gif->start(_thumbw, _thumbh, _width, _height, roundRadius, roundCorners); _gif->start(_thumbw, _thumbh, _width, _height, roundRadius, roundCorners);
if (isRound) {
Media::Player::mixer()->setVideoVolume(1.);
}
} }
} else { } else {
_width = qMax(_width, gifMaxStatusWidth(_data) + 2 * int32(st::msgDateImgDelta + st::msgDateImgPadding.x())); _width = qMax(_width, gifMaxStatusWidth(_data) + 2 * int32(st::msgDateImgDelta + st::msgDateImgPadding.x()));
@ -2325,18 +2322,20 @@ void HistoryGif::updateStatusText() const {
if (_gif && _gif->mode() == Media::Clip::Reader::Mode::Video) { if (_gif && _gif->mode() == Media::Clip::Reader::Mode::Video) {
statusSize = -1 - _data->duration(); statusSize = -1 - _data->duration();
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Video); auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.length) { if (state.id == _gif->audioMsgId()) {
auto position = int64(0); if (state.length) {
if (!Media::Player::IsStopped(state.state) && state.state != Media::Player::State::Finishing) { auto position = int64(0);
position = state.position; if (!Media::Player::IsStopped(state.state) && state.state != Media::Player::State::Finishing) {
} else if (state.state == Media::Player::State::StoppedAtEnd) { position = state.position;
position = state.length; } else if (state.state == Media::Player::State::StoppedAtEnd) {
position = state.length;
}
accumulate_max(statusSize, -1 - int((state.length - position) / state.frequency + 1));
}
if (_roundPlayback) {
_roundPlayback->updateState(state);
} }
accumulate_max(statusSize, -1 - int((state.length - position) / state.frequency + 1));
}
if (_roundPlayback) {
_roundPlayback->updateState(state);
} }
} }
} else { } else {
@ -2395,8 +2394,14 @@ bool HistoryGif::playInline(bool autoplay) {
if (_data->isRoundVideo() && _gif) { if (_data->isRoundVideo() && _gif) {
// Stop autoplayed silent video when we start playback by click. // Stop autoplayed silent video when we start playback by click.
// Stop finished video message when autoplay starts. // Stop finished video message when autoplay starts.
if ((!autoplay && _gif->mode() == Mode::Gif) if (!autoplay) {
|| (autoplay && _gif->mode() == Mode::Video && _gif->state() == Media::Clip::State::Finished)) { if (_gif->mode() == Mode::Gif) {
stopInline();
} else {
_gif->pauseResumeVideo();
return true;
}
} else if (autoplay && _gif->mode() == Mode::Video && _gif->state() == Media::Clip::State::Finished) {
stopInline(); stopInline();
} }
} }
@ -2407,7 +2412,7 @@ bool HistoryGif::playInline(bool autoplay) {
App::stopGifItems(); App::stopGifItems();
} }
auto mode = (!autoplay && _data->isRoundVideo()) ? Mode::Video : Mode::Gif; auto mode = (!autoplay && _data->isRoundVideo()) ? Mode::Video : Mode::Gif;
setClipReader(Media::Clip::MakeReader(_data->location(), _data->data(), [this](Media::Clip::Notification notification) { setClipReader(Media::Clip::MakeReader(_data, _parent->fullId(), [this](Media::Clip::Notification notification) {
_parent->clipCallback(notification); _parent->clipCallback(notification);
}, mode)); }, mode));
if (mode == Mode::Video) { if (mode == Mode::Video) {

View File

@ -139,7 +139,7 @@ void Gif::paint(Painter &p, const QRect &clip, const PaintContext *context) cons
bool loaded = document->loaded(), loading = document->loading(), displayLoading = document->displayLoading(); bool loaded = document->loaded(), loading = document->loading(), displayLoading = document->displayLoading();
if (loaded && !_gif && !_gif.isBad()) { if (loaded && !_gif && !_gif.isBad()) {
auto that = const_cast<Gif*>(this); auto that = const_cast<Gif*>(this);
that->_gif = Media::Clip::MakeReader(document->location(), document->data(), [that](Media::Clip::Notification notification) { that->_gif = Media::Clip::MakeReader(document, FullMsgId(), [that](Media::Clip::Notification notification) {
that->clipCallback(notification); that->clipCallback(notification);
}); });
if (_gif) _gif->setAutoplay(); if (_gif) _gif->setAutoplay();
@ -1191,7 +1191,7 @@ void Game::paint(Painter &p, const QRect &clip, const PaintContext *context) con
bool loaded = document->loaded(), loading = document->loading(), displayLoading = document->displayLoading(); bool loaded = document->loaded(), loading = document->loading(), displayLoading = document->displayLoading();
if (loaded && !_gif && !_gif.isBad()) { if (loaded && !_gif && !_gif.isBad()) {
auto that = const_cast<Game*>(this); auto that = const_cast<Game*>(this);
that->_gif = Media::Clip::MakeReader(document->location(), document->data(), [that](Media::Clip::Notification notification) { that->_gif = Media::Clip::MakeReader(document, FullMsgId(), [that](Media::Clip::Notification notification) {
that->clipCallback(notification); that->clipCallback(notification);
}); });
if (_gif) _gif->setAutoplay(); if (_gif) _gif->setAutoplay();

View File

@ -920,7 +920,7 @@ QPixmap MediaPreviewWidget::currentImage() const {
if (_document->loaded()) { if (_document->loaded()) {
if (!_gif && !_gif.isBad()) { if (!_gif && !_gif.isBad()) {
auto that = const_cast<MediaPreviewWidget*>(this); auto that = const_cast<MediaPreviewWidget*>(this);
that->_gif = Media::Clip::MakeReader(_document->location(), _document->data(), [this, that](Media::Clip::Notification notification) { that->_gif = Media::Clip::MakeReader(_document, FullMsgId(), [this, that](Media::Clip::Notification notification) {
that->clipCallback(notification); that->clipCallback(notification);
}); });
if (_gif) _gif->setAutoplay(); if (_gif) _gif->setAutoplay();

View File

@ -1553,6 +1553,7 @@ void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
if (auto item = App::histItemById(audioId.contextId())) { if (auto item = App::histItemById(audioId.contextId())) {
Ui::repaintHistoryItem(item); Ui::repaintHistoryItem(item);
item->audioTrackUpdated();
} }
if (auto items = InlineBots::Layout::documentItems()) { if (auto items = InlineBots::Layout::documentItems()) {
for (auto item : items->value(audioId.audio())) { for (auto item : items->value(audioId.audio())) {

View File

@ -42,8 +42,11 @@ QMutex AudioMutex;
ALCdevice *AudioDevice = nullptr; ALCdevice *AudioDevice = nullptr;
ALCcontext *AudioContext = nullptr; ALCcontext *AudioContext = nullptr;
auto suppressAllGain = 1.; constexpr auto kSuppressRatioAll = 0.2;
auto suppressSongGain = 1.; constexpr auto kSuppressRatioSong = 0.05;
auto VolumeMultiplierAll = 1.;
auto VolumeMultiplierSong = 1.;
} // namespace } // namespace
@ -249,7 +252,7 @@ void StopDetachIfNotUsedSafe() {
namespace Player { namespace Player {
namespace { namespace {
constexpr auto kVideoVolumeRound = 10000; constexpr auto kVolumeRound = 10000;
constexpr auto kPreloadSamples = 2LL * 48000; // preload next part if less than 2 seconds remains constexpr auto kPreloadSamples = 2LL * 48000; // preload next part if less than 2 seconds remains
constexpr auto kFadeDuration = TimeMs(500); constexpr auto kFadeDuration = TimeMs(500);
constexpr auto kCheckPlaybackPositionTimeout = TimeMs(100); // 100ms per check audio position constexpr auto kCheckPlaybackPositionTimeout = TimeMs(100); // 100ms per check audio position
@ -264,11 +267,12 @@ base::Observable<AudioMsgId> &Updated() {
return UpdatedObservable; return UpdatedObservable;
} }
// Thread: Any. Must be locked: AudioMutex.
float64 ComputeVolume(AudioMsgId::Type type) { float64 ComputeVolume(AudioMsgId::Type type) {
switch (type) { switch (type) {
case AudioMsgId::Type::Voice: return suppressAllGain; case AudioMsgId::Type::Voice: return VolumeMultiplierAll;
case AudioMsgId::Type::Song: return suppressSongGain * Global::SongVolume(); case AudioMsgId::Type::Song: return VolumeMultiplierSong * mixer()->getSongVolume();
case AudioMsgId::Type::Video: return suppressSongGain * mixer()->getVideoVolume(); case AudioMsgId::Type::Video: return mixer()->getVideoVolume();
} }
return 1.; return 1.;
} }
@ -349,7 +353,8 @@ void Mixer::Track::clear() {
} }
videoData = nullptr; videoData = nullptr;
videoPlayId = 0; lastUpdateWhen = 0;
lastUpdateCorrectedMs = 0;
} }
void Mixer::Track::started() { void Mixer::Track::started() {
@ -432,8 +437,11 @@ void Mixer::Track::resetStream() {
} }
} }
Mixer::Track::~Track() = default;
Mixer::Mixer() Mixer::Mixer()
: _videoVolume(kVideoVolumeRound) : _volumeVideo(kVolumeRound)
, _volumeSong(kVolumeRound)
, _fader(new Fader(&_faderThread)) , _fader(new Fader(&_faderThread))
, _loader(new Loaders(&_loaderThread)) { , _loader(new Loaders(&_loaderThread)) {
connect(this, SIGNAL(faderOnTimer()), _fader, SLOT(onTimer()), Qt::QueuedConnection); connect(this, SIGNAL(faderOnTimer()), _fader, SLOT(onTimer()), Qt::QueuedConnection);
@ -483,7 +491,7 @@ Mixer::~Mixer() {
} }
void Mixer::onUpdated(const AudioMsgId &audio) { void Mixer::onUpdated(const AudioMsgId &audio) {
if (audio.type() == AudioMsgId::Type::Video) { if (audio.playId()) {
videoSoundProgress(audio); videoSoundProgress(audio);
} }
Media::Player::Updated().notify(audio); Media::Player::Updated().notify(audio);
@ -491,15 +499,29 @@ void Mixer::onUpdated(const AudioMsgId &audio) {
void Mixer::onError(const AudioMsgId &audio) { void Mixer::onError(const AudioMsgId &audio) {
emit stoppedOnError(audio); emit stoppedOnError(audio);
if (audio.type() == AudioMsgId::Type::Voice) {
emit unsuppressSong(); QMutexLocker lock(&AudioMutex);
auto type = audio.type();
if (type == AudioMsgId::Type::Voice) {
if (auto current = trackForType(type)) {
if (current->state.id == audio) {
emit unsuppressSong();
}
}
} }
} }
void Mixer::onStopped(const AudioMsgId &audio) { void Mixer::onStopped(const AudioMsgId &audio) {
emit updated(audio); emit updated(audio);
if (audio.type() == AudioMsgId::Type::Voice) {
emit unsuppressSong(); QMutexLocker lock(&AudioMutex);
auto type = audio.type();
if (type == AudioMsgId::Type::Voice) {
if (auto current = trackForType(type)) {
if (current->state.id == audio) {
emit unsuppressSong();
}
}
} }
} }
@ -591,6 +613,13 @@ bool Mixer::fadedStop(AudioMsgId::Type type, bool *fadedStart) {
} }
void Mixer::play(const AudioMsgId &audio, int64 position) { void Mixer::play(const AudioMsgId &audio, int64 position) {
setSongVolume(Global::SongVolume());
play(audio, std::unique_ptr<VideoSoundData>(), position);
}
void Mixer::play(const AudioMsgId &audio, std::unique_ptr<VideoSoundData> videoData, int64 position) {
Expects(!videoData || audio.playId() != 0);
auto type = audio.type(); auto type = audio.type();
AudioMsgId stopped; AudioMsgId stopped;
auto notLoadedYet = false; auto notLoadedYet = false;
@ -599,10 +628,31 @@ void Mixer::play(const AudioMsgId &audio, int64 position) {
Audio::AttachToDevice(); Audio::AttachToDevice();
if (!AudioDevice) return; if (!AudioDevice) return;
bool fadedStart = false; auto fadedStart = false;
auto current = trackForType(type); auto current = trackForType(type);
if (!current) return; if (!current) return;
if (type == AudioMsgId::Type::Video) {
auto pauseType = [this](AudioMsgId::Type type) {
auto current = trackForType(type);
switch (current->state.state) {
case State::Starting:
case State::Resuming:
case State::Playing: {
current->state.state = State::Pausing;
resetFadeStartPosition(type);
} break;
case State::Finishing: {
current->state.state = State::Pausing;
} break;
}
};
pauseType(AudioMsgId::Type::Song);
pauseType(AudioMsgId::Type::Voice);
}
if (current->state.id != audio) { if (current->state.id != audio) {
if (fadedStop(type, &fadedStart)) { if (fadedStop(type, &fadedStart)) {
stopped = current->state.id; stopped = current->state.id;
@ -611,42 +661,49 @@ void Mixer::play(const AudioMsgId &audio, int64 position) {
emit loaderOnCancel(current->state.id); emit loaderOnCancel(current->state.id);
emit faderOnTimer(); emit faderOnTimer();
} }
if (type == AudioMsgId::Type::Video) {
auto foundCurrent = currentIndex(type); current->clear();
auto index = 0;
for (; index != kTogetherLimit; ++index) {
if (trackForType(type, index)->state.id == audio) {
*foundCurrent = index;
break;
}
}
if (index == kTogetherLimit && ++*foundCurrent >= kTogetherLimit) {
*foundCurrent -= kTogetherLimit;
}
current = trackForType(type);
}
current->state.id = audio;
current->file = audio.audio()->location(true);
current->data = audio.audio()->data();
if (current->file.isEmpty() && current->data.isEmpty()) {
notLoadedYet = true;
if (audio.type() == AudioMsgId::Type::Song) {
setStoppedState(current);
} else { } else {
setStoppedState(current, State::StoppedAtError); auto foundCurrent = currentIndex(type);
auto index = 0;
for (; index != kTogetherLimit; ++index) {
if (trackForType(type, index)->state.id == audio) {
*foundCurrent = index;
break;
}
}
if (index == kTogetherLimit && ++*foundCurrent >= kTogetherLimit) {
*foundCurrent -= kTogetherLimit;
}
current = trackForType(type);
} }
}
current->state.id = audio;
current->lastUpdateWhen = 0;
current->lastUpdateCorrectedMs = 0;
if (videoData) {
current->videoData = std::move(videoData);
} else {
current->file = audio.audio()->location(true);
current->data = audio.audio()->data();
notLoadedYet = (current->file.isEmpty() && current->data.isEmpty());
}
if (notLoadedYet) {
auto newState = (type == AudioMsgId::Type::Song) ? State::Stopped : State::StoppedAtError;
setStoppedState(current, newState);
} else { } else {
current->state.position = position; current->state.position = position;
current->state.state = fadedStart ? State::Starting : State::Playing; current->state.state = current->videoData ? State::Paused : fadedStart ? State::Starting : State::Playing;
current->loading = true; current->loading = true;
emit loaderOnStart(audio, position); emit loaderOnStart(current->state.id, position);
if (type == AudioMsgId::Type::Voice) { if (type == AudioMsgId::Type::Voice) {
emit suppressSong(); emit suppressSong();
} }
} }
} }
if (notLoadedYet) { if (notLoadedYet) {
if (audio.type() == AudioMsgId::Type::Song) { if (type == AudioMsgId::Type::Song || type == AudioMsgId::Type::Video) {
DocumentOpenClickHandler::doOpen(audio.audio(), App::histItemById(audio.contextId())); DocumentOpenClickHandler::doOpen(audio.audio(), App::histItemById(audio.contextId()));
} else { } else {
onError(audio); onError(audio);
@ -657,83 +714,58 @@ void Mixer::play(const AudioMsgId &audio, int64 position) {
} }
} }
void Mixer::initFromVideo(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data, int64 position) { void Mixer::feedFromVideo(VideoSoundPart &&part) {
AudioMsgId stopped; _loader->feedFromVideo(std::move(part));
{
QMutexLocker lock(&AudioMutex);
// Pause current song.
auto songType = AudioMsgId::Type::Song;
auto currentSong = trackForType(songType);
switch (currentSong->state.state) {
case State::Starting:
case State::Resuming:
case State::Playing: {
currentSong->state.state = State::Pausing;
resetFadeStartPosition(songType);
} break;
case State::Finishing: {
currentSong->state.state = State::Pausing;
} break;
}
auto type = AudioMsgId::Type::Video;
auto current = trackForType(type);
t_assert(current != nullptr);
if (current->state.id) {
fadedStop(type);
stopped = current->state.id;
emit loaderOnCancel(current->state.id);
}
emit faderOnTimer();
current->clear();
current->state.id = AudioMsgId(AudioMsgId::Type::Video);
current->videoPlayId = videoPlayId;
current->videoData = std::move(data);
{
QMutexLocker videoLock(&_lastVideoMutex);
_lastVideoPlayId = current->videoPlayId;
_lastVideoPlaybackWhen = 0;
_lastVideoPlaybackCorrectedMs = 0;
}
_loader->startFromVideo(current->videoPlayId);
current->state.state = State::Paused;
current->loading = true;
emit loaderOnStart(current->state.id, position);
}
if (stopped) emit updated(stopped);
} }
void Mixer::stopFromVideo(uint64 videoPlayId) { TimeMs Mixer::getVideoCorrectedTime(const AudioMsgId &audio, TimeMs frameMs, TimeMs systemMs) {
auto result = frameMs;
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
auto track = trackForType(type);
if (track && track->state.id == audio && track->lastUpdateWhen > 0) {
result = static_cast<TimeMs>(track->lastUpdateCorrectedMs);
if (systemMs > track->lastUpdateWhen) {
result += (systemMs - track->lastUpdateWhen);
}
}
return result;
}
void Mixer::videoSoundProgress(const AudioMsgId &audio) {
auto type = audio.type();
QMutexLocker lock(&AudioMutex);
auto current = trackForType(type);
if (current && current->state.length && current->state.frequency) {
if (current->state.id == audio && current->state.state == State::Playing) {
current->lastUpdateWhen = getms();
current->lastUpdateCorrectedMs = (current->state.position * 1000ULL) / current->state.frequency;
}
}
}
bool Mixer::checkCurrentALError(AudioMsgId::Type type) {
if (!Audio::PlaybackErrorHappened()) return true;
auto data = trackForType(type);
if (!data) {
setStoppedState(data, State::StoppedAtError);
onError(data->state.id);
}
return false;
}
void Mixer::pause(const AudioMsgId &audio, bool fast) {
AudioMsgId current; AudioMsgId current;
{ {
QMutexLocker lock(&AudioMutex); QMutexLocker lock(&AudioMutex);
auto track = trackForType(AudioMsgId::Type::Video); auto type = audio.type();
t_assert(track != nullptr);
if (track->videoPlayId != videoPlayId) {
return;
}
current = track->state.id;
fadedStop(AudioMsgId::Type::Video);
track->clear();
}
if (current) emit updated(current);
}
void Mixer::pauseFromVideo(uint64 videoPlayId) {
AudioMsgId current;
{
QMutexLocker lock(&AudioMutex);
auto type = AudioMsgId::Type::Video;
auto track = trackForType(type); auto track = trackForType(type);
t_assert(track != nullptr); if (!track || track->state.id != audio) {
if (track->videoPlayId != videoPlayId) {
return; return;
} }
@ -742,41 +774,44 @@ void Mixer::pauseFromVideo(uint64 videoPlayId) {
case State::Starting: case State::Starting:
case State::Resuming: case State::Resuming:
case State::Playing: { case State::Playing: {
track->state.state = State::Paused; track->state.state = fast ? State::Paused : State::Pausing;
resetFadeStartPosition(type); resetFadeStartPosition(type);
if (type == AudioMsgId::Type::Voice) {
if (track->isStreamCreated()) { emit unsuppressSong();
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (!checkCurrentALError(type)) return;
if (state == AL_PLAYING) {
alSourcePause(track->stream.source);
if (!checkCurrentALError(type)) return;
}
} }
} break; } break;
case State::Finishing: {
track->state.state = fast ? State::Paused : State::Pausing;
} break;
} }
if (fast && track->isStreamCreated()) {
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (!checkCurrentALError(type)) return;
if (state == AL_PLAYING) {
alSourcePause(track->stream.source);
if (!checkCurrentALError(type)) return;
}
}
emit faderOnTimer(); emit faderOnTimer();
QMutexLocker videoLock(&_lastVideoMutex); track->lastUpdateWhen = 0;
if (_lastVideoPlayId == videoPlayId) { track->lastUpdateCorrectedMs = 0;
_lastVideoPlaybackWhen = 0;
_lastVideoPlaybackCorrectedMs = 0;
}
} }
if (current) emit updated(current); if (current) emit updated(current);
} }
void Mixer::resumeFromVideo(uint64 videoPlayId) { void Mixer::resume(const AudioMsgId &audio, bool fast) {
AudioMsgId current; AudioMsgId current;
{ {
QMutexLocker lock(&AudioMutex); QMutexLocker lock(&AudioMutex);
auto type = AudioMsgId::Type::Video; auto type = audio.type();
auto track = trackForType(type); auto track = trackForType(type);
t_assert(track != nullptr); if (!track || track->state.id != audio) {
if (track->videoPlayId != videoPlayId) {
return; return;
} }
@ -797,7 +832,7 @@ void Mixer::resumeFromVideo(uint64 videoPlayId) {
} }
} }
} }
track->state.state = State::Playing; track->state.state = fast ? State::Playing : State::Resuming;
if (track->isStreamCreated()) { if (track->isStreamCreated()) {
// When starting the video audio is in paused state and // When starting the video audio is in paused state and
@ -817,6 +852,9 @@ void Mixer::resumeFromVideo(uint64 videoPlayId) {
alSourcePlay(track->stream.source); alSourcePlay(track->stream.source);
if (!checkCurrentALError(type)) return; if (!checkCurrentALError(type)) return;
} }
if (type == AudioMsgId::Type::Voice) {
emit suppressSong();
}
} }
} break; } break;
} }
@ -825,104 +863,6 @@ void Mixer::resumeFromVideo(uint64 videoPlayId) {
if (current) emit updated(current); if (current) emit updated(current);
} }
void Mixer::feedFromVideo(VideoSoundPart &&part) {
_loader->feedFromVideo(std::move(part));
}
TimeMs Mixer::getVideoCorrectedTime(uint64 playId, TimeMs frameMs, TimeMs systemMs) {
auto result = frameMs;
QMutexLocker videoLock(&_lastVideoMutex);
if (_lastVideoPlayId == playId && _lastVideoPlaybackWhen > 0) {
result = static_cast<TimeMs>(_lastVideoPlaybackCorrectedMs);
if (systemMs > _lastVideoPlaybackWhen) {
result += (systemMs - _lastVideoPlaybackWhen);
}
}
return result;
}
void Mixer::videoSoundProgress(const AudioMsgId &audio) {
auto type = audio.type();
t_assert(type == AudioMsgId::Type::Video);
QMutexLocker lock(&AudioMutex);
QMutexLocker videoLock(&_lastVideoMutex);
auto current = trackForType(type);
t_assert(current != nullptr);
if (current->videoPlayId == _lastVideoPlayId && current->state.length && current->state.frequency) {
if (current->state.state == State::Playing) {
_lastVideoPlaybackWhen = getms();
_lastVideoPlaybackCorrectedMs = (current->state.position * 1000ULL) / current->state.frequency;
}
}
}
bool Mixer::checkCurrentALError(AudioMsgId::Type type) {
if (!Audio::PlaybackErrorHappened()) return true;
auto data = trackForType(type);
if (!data) {
setStoppedState(data, State::StoppedAtError);
onError(data->state.id);
}
return false;
}
void Mixer::pauseresume(AudioMsgId::Type type, bool fast) {
QMutexLocker lock(&AudioMutex);
auto current = trackForType(type);
switch (current->state.state) {
case State::Pausing:
case State::Paused:
case State::PausedAtEnd: {
Audio::AttachToDevice();
if (current->state.state == State::Paused) {
resetFadeStartPosition(type);
} else if (current->state.state == State::PausedAtEnd) {
if (current->isStreamCreated()) {
alSourcei(current->stream.source, AL_SAMPLE_OFFSET, qMax(current->state.position - current->bufferedPosition, 0LL));
if (!checkCurrentALError(type)) return;
}
}
current->state.state = fast ? State::Playing : State::Resuming;
ALint state = AL_INITIAL;
alGetSourcei(current->stream.source, AL_SOURCE_STATE, &state);
if (!checkCurrentALError(type)) return;
if (state != AL_PLAYING) {
if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
return;
}
alSourcef(current->stream.source, AL_GAIN, ComputeVolume(type));
if (!checkCurrentALError(type)) return;
alSourcePlay(current->stream.source);
if (!checkCurrentALError(type)) return;
}
if (type == AudioMsgId::Type::Voice) emit suppressSong();
} break;
case State::Starting:
case State::Resuming:
case State::Playing: {
current->state.state = State::Pausing;
resetFadeStartPosition(type);
if (type == AudioMsgId::Type::Voice) emit unsuppressSong();
} break;
case State::Finishing: {
current->state.state = State::Pausing;
} break;
}
emit faderOnTimer();
}
void Mixer::seek(AudioMsgId::Type type, int64 position) { void Mixer::seek(AudioMsgId::Type type, int64 position) {
QMutexLocker lock(&AudioMutex); QMutexLocker lock(&AudioMutex);
@ -957,7 +897,7 @@ void Mixer::seek(AudioMsgId::Type type, int64 position) {
current->state.state = State::Paused; current->state.state = State::Paused;
} }
lock.unlock(); lock.unlock();
return pauseresume(type, true); return resume(audio, true);
} break; } break;
case State::Starting: case State::Starting:
case State::Resuming: case State::Resuming:
@ -979,16 +919,21 @@ void Mixer::seek(AudioMsgId::Type type, int64 position) {
emit faderOnTimer(); emit faderOnTimer();
} }
void Mixer::stop(AudioMsgId::Type type) { void Mixer::stop(const AudioMsgId &audio) {
AudioMsgId current; AudioMsgId current;
{ {
QMutexLocker lock(&AudioMutex); QMutexLocker lock(&AudioMutex);
auto type = audio.type();
auto track = trackForType(type); auto track = trackForType(type);
t_assert(track != nullptr); if (!track || track->state.id != audio) {
return;
}
current = track->state.id; current = track->state.id;
fadedStop(type); fadedStop(type);
if (type == AudioMsgId::Type::Video) { if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
} else if (type == AudioMsgId::Type::Video) {
track->clear(); track->clear();
} }
} }
@ -1026,19 +971,9 @@ void Mixer::stopAndClear() {
clearAndCancel(AudioMsgId::Type::Song, index); clearAndCancel(AudioMsgId::Type::Song, index);
} }
_videoTrack.clear(); _videoTrack.clear();
_loader->stopFromVideo();
} }
} }
TrackState Mixer::currentVideoState(uint64 videoPlayId) {
QMutexLocker lock(&AudioMutex);
auto current = trackForType(AudioMsgId::Type::Video);
if (!current || current->videoPlayId != videoPlayId) {
return TrackState();
}
return current->state;
}
TrackState Mixer::currentState(AudioMsgId::Type type) { TrackState Mixer::currentState(AudioMsgId::Type type) {
QMutexLocker lock(&AudioMutex); QMutexLocker lock(&AudioMutex);
auto current = trackForType(type); auto current = trackForType(type);
@ -1106,18 +1041,26 @@ void Mixer::reattachTracks() {
_videoTrack.reattach(AudioMsgId::Type::Video); _videoTrack.reattach(AudioMsgId::Type::Video);
} }
void Mixer::setSongVolume(float64 volume) {
_volumeSong.storeRelease(qRound(volume * kVolumeRound));
}
float64 Mixer::getSongVolume() const {
return float64(_volumeSong.loadAcquire()) / kVolumeRound;
}
void Mixer::setVideoVolume(float64 volume) { void Mixer::setVideoVolume(float64 volume) {
_videoVolume.storeRelease(qRound(volume * kVideoVolumeRound)); _volumeVideo.storeRelease(qRound(volume * kVolumeRound));
} }
float64 Mixer::getVideoVolume() const { float64 Mixer::getVideoVolume() const {
return float64(_videoVolume.loadAcquire()) / kVideoVolumeRound; return float64(_volumeVideo.loadAcquire()) / kVolumeRound;
} }
Fader::Fader(QThread *thread) : QObject() Fader::Fader(QThread *thread) : QObject()
, _timer(this) , _timer(this)
, _suppressAllGain(1., 1.) , _suppressVolumeAll(1., 1.)
, _suppressSongGain(1., 1.) { , _suppressVolumeSong(1., 1.) {
moveToThread(thread); moveToThread(thread);
_timer.moveToThread(thread); _timer.moveToThread(thread);
connect(thread, SIGNAL(started()), this, SLOT(onInit())); connect(thread, SIGNAL(started()), this, SLOT(onInit()));
@ -1134,64 +1077,66 @@ void Fader::onTimer() {
QMutexLocker lock(&AudioMutex); QMutexLocker lock(&AudioMutex);
if (!mixer()) return; if (!mixer()) return;
bool suppressAudioChanged = false, suppressSongChanged = false; auto volumeChangedAll = false;
auto volumeChangedSong = false;
if (_suppressAll || _suppressSongAnim) { if (_suppressAll || _suppressSongAnim) {
auto ms = getms(); auto ms = getms();
auto wasSong = suppressSongGain;
if (_suppressAll) { if (_suppressAll) {
auto wasAudio = suppressAllGain;
if (ms >= _suppressAllEnd || ms < _suppressAllStart) { if (ms >= _suppressAllEnd || ms < _suppressAllStart) {
_suppressAll = _suppressAllAnim = false; _suppressAll = _suppressAllAnim = false;
_suppressAllGain = anim::value(1., 1.); _suppressVolumeAll = anim::value(1., 1.);
} else if (ms > _suppressAllEnd - kFadeDuration) { } else if (ms > _suppressAllEnd - kFadeDuration) {
if (_suppressAllGain.to() != 1.) _suppressAllGain.start(1.); if (_suppressVolumeAll.to() != 1.) _suppressVolumeAll.start(1.);
_suppressAllGain.update(1. - ((_suppressAllEnd - ms) / float64(kFadeDuration)), anim::linear); _suppressVolumeAll.update(1. - ((_suppressAllEnd - ms) / float64(kFadeDuration)), anim::linear);
} else if (ms >= _suppressAllStart + st::mediaPlayerSuppressDuration) { } else if (ms >= _suppressAllStart + st::mediaPlayerSuppressDuration) {
if (_suppressAllAnim) { if (_suppressAllAnim) {
_suppressAllGain.finish(); _suppressVolumeAll.finish();
_suppressAllAnim = false; _suppressAllAnim = false;
} }
} else if (ms > _suppressAllStart) { } else if (ms > _suppressAllStart) {
_suppressAllGain.update((ms - _suppressAllStart) / float64(st::mediaPlayerSuppressDuration), anim::linear); _suppressVolumeAll.update((ms - _suppressAllStart) / float64(st::mediaPlayerSuppressDuration), anim::linear);
} }
suppressAllGain = _suppressAllGain.current(); auto wasVolumeMultiplierAll = VolumeMultiplierAll;
suppressAudioChanged = (suppressAllGain != wasAudio); VolumeMultiplierAll = _suppressVolumeAll.current();
volumeChangedAll = (VolumeMultiplierAll != wasVolumeMultiplierAll);
} }
if (_suppressSongAnim) { if (_suppressSongAnim) {
if (ms >= _suppressSongStart + kFadeDuration) { if (ms >= _suppressSongStart + kFadeDuration) {
_suppressSongGain.finish(); _suppressVolumeSong.finish();
_suppressSongAnim = false; _suppressSongAnim = false;
} else { } else {
_suppressSongGain.update((ms - _suppressSongStart) / float64(kFadeDuration), anim::linear); _suppressVolumeSong.update((ms - _suppressSongStart) / float64(kFadeDuration), anim::linear);
} }
} }
suppressSongGain = qMin(suppressAllGain, _suppressSongGain.current()); auto wasVolumeMultiplierSong = VolumeMultiplierSong;
suppressSongChanged = (suppressSongGain != wasSong); VolumeMultiplierSong = _suppressVolumeSong.current();
accumulate_min(VolumeMultiplierSong, VolumeMultiplierAll);
volumeChangedSong = (VolumeMultiplierSong != wasVolumeMultiplierSong);
} }
bool hasFading = (_suppressAll || _suppressSongAnim); auto hasFading = (_suppressAll || _suppressSongAnim);
bool hasPlaying = false; auto hasPlaying = false;
auto updatePlayback = [this, &hasPlaying, &hasFading](AudioMsgId::Type type, int index, float64 suppressGain, bool suppressGainChanged) { auto updatePlayback = [this, &hasPlaying, &hasFading](AudioMsgId::Type type, int index, float64 volumeMultiplier, bool suppressGainChanged) {
auto track = mixer()->trackForType(type, index); auto track = mixer()->trackForType(type, index);
if (IsStopped(track->state.state) || track->state.state == State::Paused || !track->isStreamCreated()) return; if (IsStopped(track->state.state) || track->state.state == State::Paused || !track->isStreamCreated()) return;
int32 emitSignals = updateOnePlayback(track, hasPlaying, hasFading, suppressGain, suppressGainChanged); auto emitSignals = updateOnePlayback(track, hasPlaying, hasFading, volumeMultiplier, suppressGainChanged);
if (emitSignals & EmitError) emit error(track->state.id); if (emitSignals & EmitError) emit error(track->state.id);
if (emitSignals & EmitStopped) emit audioStopped(track->state.id); if (emitSignals & EmitStopped) emit audioStopped(track->state.id);
if (emitSignals & EmitPositionUpdated) emit playPositionUpdated(track->state.id); if (emitSignals & EmitPositionUpdated) emit playPositionUpdated(track->state.id);
if (emitSignals & EmitNeedToPreload) emit needToPreload(track->state.id); if (emitSignals & EmitNeedToPreload) emit needToPreload(track->state.id);
}; };
auto suppressGainForMusic = suppressSongGain * Global::SongVolume(); auto suppressGainForMusic = ComputeVolume(AudioMsgId::Type::Song);
auto suppressGainForMusicChanged = suppressSongChanged || _songVolumeChanged; auto suppressGainForMusicChanged = volumeChangedSong || _volumeChangedSong;
for (auto i = 0; i != kTogetherLimit; ++i) { for (auto i = 0; i != kTogetherLimit; ++i) {
updatePlayback(AudioMsgId::Type::Voice, i, suppressAllGain, suppressAudioChanged); updatePlayback(AudioMsgId::Type::Voice, i, VolumeMultiplierAll, volumeChangedAll);
updatePlayback(AudioMsgId::Type::Song, i, suppressGainForMusic, suppressGainForMusicChanged); updatePlayback(AudioMsgId::Type::Song, i, suppressGainForMusic, suppressGainForMusicChanged);
} }
auto suppressGainForVideo = suppressSongGain * Global::VideoVolume(); auto suppressGainForVideo = ComputeVolume(AudioMsgId::Type::Video);
auto suppressGainForVideoChanged = suppressSongChanged || _videoVolumeChanged; auto suppressGainForVideoChanged = volumeChangedAll || _volumeChangedVideo;
updatePlayback(AudioMsgId::Type::Video, 0, suppressGainForVideo, suppressGainForVideoChanged); updatePlayback(AudioMsgId::Type::Video, 0, suppressGainForVideo, suppressGainForVideoChanged);
_songVolumeChanged = _videoVolumeChanged = false; _volumeChangedSong = _volumeChangedVideo = false;
if (hasFading) { if (hasFading) {
_timer.start(kCheckFadingTimeout); _timer.start(kCheckFadingTimeout);
@ -1204,8 +1149,9 @@ void Fader::onTimer() {
} }
} }
int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 suppressGain, bool suppressGainChanged) { int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 volumeMultiplier, bool volumeChanged) {
bool playing = false, fading = false; auto playing = false;
auto fading = false;
auto errorHappened = [this, track] { auto errorHappened = [this, track] {
if (Audio::PlaybackErrorHappened()) { if (Audio::PlaybackErrorHappened()) {
@ -1258,7 +1204,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
emitSignals |= EmitStopped; emitSignals |= EmitStopped;
} else if (TimeMs(1000) * fadingForSamplesCount >= kFadeDuration * track->state.frequency) { } else if (TimeMs(1000) * fadingForSamplesCount >= kFadeDuration * track->state.frequency) {
fading = false; fading = false;
alSourcef(track->stream.source, AL_GAIN, 1. * suppressGain); alSourcef(track->stream.source, AL_GAIN, 1. * volumeMultiplier);
if (errorHappened()) return EmitError; if (errorHappened()) return EmitError;
switch (track->state.state) { switch (track->state.state) {
@ -1286,7 +1232,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
if (track->state.state == State::Pausing || track->state.state == State::Finishing) { if (track->state.state == State::Pausing || track->state.state == State::Finishing) {
newGain = 1. - newGain; newGain = 1. - newGain;
} }
alSourcef(track->stream.source, AL_GAIN, newGain * suppressGain); alSourcef(track->stream.source, AL_GAIN, newGain * volumeMultiplier);
if (errorHappened()) return EmitError; if (errorHappened()) return EmitError;
} }
} else if (playing && (state == AL_PLAYING || !track->loading)) { } else if (playing && (state == AL_PLAYING || !track->loading)) {
@ -1299,8 +1245,8 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
} }
setStoppedState(track, State::StoppedAtEnd); setStoppedState(track, State::StoppedAtEnd);
emitSignals |= EmitStopped; emitSignals |= EmitStopped;
} else if (suppressGainChanged) { } else if (volumeChanged) {
alSourcef(track->stream.source, AL_GAIN, suppressGain); alSourcef(track->stream.source, AL_GAIN, 1. * volumeMultiplier);
if (errorHappened()) return EmitError; if (errorHappened()) return EmitError;
} }
} }
@ -1333,7 +1279,7 @@ void Fader::onSuppressSong() {
_suppressSong = true; _suppressSong = true;
_suppressSongAnim = true; _suppressSongAnim = true;
_suppressSongStart = getms(); _suppressSongStart = getms();
_suppressSongGain.start(st::suppressSong); _suppressVolumeSong.start(kSuppressRatioSong);
onTimer(); onTimer();
} }
} }
@ -1343,7 +1289,7 @@ void Fader::onUnsuppressSong() {
_suppressSong = false; _suppressSong = false;
_suppressSongAnim = true; _suppressSongAnim = true;
_suppressSongStart = getms(); _suppressSongStart = getms();
_suppressSongGain.start(1.); _suppressVolumeSong.start(1.);
onTimer(); onTimer();
} }
} }
@ -1355,17 +1301,17 @@ void Fader::onSuppressAll(qint64 duration) {
_suppressAllStart = now; _suppressAllStart = now;
} }
_suppressAllEnd = now + duration; _suppressAllEnd = now + duration;
_suppressAllGain.start(st::suppressAll); _suppressVolumeAll.start(kSuppressRatioAll);
onTimer(); onTimer();
} }
void Fader::onSongVolumeChanged() { void Fader::onSongVolumeChanged() {
_songVolumeChanged = true; _volumeChangedSong = true;
onTimer(); onTimer();
} }
void Fader::onVideoVolumeChanged() { void Fader::onVideoVolumeChanged() {
_videoVolumeChanged = true; _volumeChangedVideo = true;
onTimer(); onTimer();
} }

View File

@ -114,18 +114,15 @@ public:
Mixer(); Mixer();
void play(const AudioMsgId &audio, int64 position = 0); void play(const AudioMsgId &audio, int64 position = 0);
void pauseresume(AudioMsgId::Type type, bool fast = false); void play(const AudioMsgId &audio, std::unique_ptr<VideoSoundData> videoData, int64 position = 0);
void pause(const AudioMsgId &audio, bool fast = false);
void resume(const AudioMsgId &audio, bool fast = false);
void seek(AudioMsgId::Type type, int64 position); // type == AudioMsgId::Type::Song void seek(AudioMsgId::Type type, int64 position); // type == AudioMsgId::Type::Song
void stop(AudioMsgId::Type type); void stop(const AudioMsgId &audio);
// Video player audio stream interface. // Video player audio stream interface.
void initFromVideo(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data, int64 position);
void feedFromVideo(VideoSoundPart &&part); void feedFromVideo(VideoSoundPart &&part);
int64 getVideoCorrectedTime(uint64 playId, TimeMs frameMs, TimeMs systemMs); int64 getVideoCorrectedTime(const AudioMsgId &id, TimeMs frameMs, TimeMs systemMs);
TrackState currentVideoState(uint64 videoPlayId);
void stopFromVideo(uint64 videoPlayId);
void pauseFromVideo(uint64 videoPlayId);
void resumeFromVideo(uint64 videoPlayId);
void stopAndClear(); void stopAndClear();
@ -143,6 +140,8 @@ public:
void reattachTracks(); void reattachTracks();
// Thread: Any. // Thread: Any.
void setSongVolume(float64 volume);
float64 getSongVolume() const;
void setVideoVolume(float64 volume); void setVideoVolume(float64 volume);
float64 getVideoVolume() const; float64 getVideoVolume() const;
@ -177,7 +176,9 @@ private:
public: public:
static constexpr int kBuffersCount = 3; static constexpr int kBuffersCount = 3;
// Thread: Any. Must be locked: AudioMutex.
void reattach(AudioMsgId::Type type); void reattach(AudioMsgId::Type type);
void detach(); void detach();
void clear(); void clear();
void started(); void started();
@ -187,6 +188,8 @@ private:
int getNotQueuedBufferIndex(); int getNotQueuedBufferIndex();
~Track();
TrackState state; TrackState state;
FileLocation file; FileLocation file;
@ -207,10 +210,11 @@ private:
uint32 buffers[kBuffersCount] = { 0 }; uint32 buffers[kBuffersCount] = { 0 };
}; };
Stream stream; Stream stream;
uint64 videoPlayId = 0;
std::unique_ptr<VideoSoundData> videoData; std::unique_ptr<VideoSoundData> videoData;
TimeMs lastUpdateWhen = 0;
TimeMs lastUpdateCorrectedMs = 0;
private: private:
void createStream(); void createStream();
void destroyStream(); void destroyStream();
@ -232,13 +236,9 @@ private:
Track _songTracks[kTogetherLimit]; Track _songTracks[kTogetherLimit];
Track _videoTrack; Track _videoTrack;
QAtomicInt _videoVolume;
uint64 _lastVideoPlayId = 0;
TimeMs _lastVideoPlaybackWhen = 0;
TimeMs _lastVideoPlaybackCorrectedMs = 0;
QMutex _lastVideoMutex;
QMutex _mutex; QAtomicInt _volumeVideo;
QAtomicInt _volumeSong;
friend class Fader; friend class Fader;
friend class Loaders; friend class Loaders;
@ -280,18 +280,20 @@ private:
EmitPositionUpdated = 0x04, EmitPositionUpdated = 0x04,
EmitNeedToPreload = 0x08, EmitNeedToPreload = 0x08,
}; };
int32 updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 suppressGain, bool suppressGainChanged); int32 updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 volumeMultiplier, bool volumeChanged);
void setStoppedState(Mixer::Track *track, State state = State::Stopped); void setStoppedState(Mixer::Track *track, State state = State::Stopped);
QTimer _timer; QTimer _timer;
bool _volumeChangedSong = false;
bool _volumeChangedVideo = false;
bool _suppressAll = false; bool _suppressAll = false;
bool _suppressAllAnim = false; bool _suppressAllAnim = false;
bool _suppressSong = false; bool _suppressSong = false;
bool _suppressSongAnim = false; bool _suppressSongAnim = false;
bool _songVolumeChanged = false; anim::value _suppressVolumeAll;
bool _videoVolumeChanged = false; anim::value _suppressVolumeSong;
anim::value _suppressAllGain, _suppressSongGain;
TimeMs _suppressAllStart = 0; TimeMs _suppressAllStart = 0;
TimeMs _suppressAllEnd = 0; TimeMs _suppressAllEnd = 0;
TimeMs _suppressSongStart = 0; TimeMs _suppressSongStart = 0;

View File

@ -20,6 +20,10 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/ */
#pragma once #pragma once
namespace FFMpeg {
struct AVPacketDataWrap;
} // namespace FFMpeg
class AudioPlayerLoader { class AudioPlayerLoader {
public: public:
AudioPlayerLoader(const FileLocation &file, const QByteArray &data, base::byte_vector &&bytes); AudioPlayerLoader(const FileLocation &file, const QByteArray &data, base::byte_vector &&bytes);
@ -40,6 +44,9 @@ public:
EndOfFile, EndOfFile,
}; };
virtual ReadResult readMore(QByteArray &samples, int64 &samplesCount) = 0; virtual ReadResult readMore(QByteArray &samples, int64 &samplesCount) = 0;
virtual void enqueuePackets(QQueue<FFMpeg::AVPacketDataWrap> &packets) {
Unexpected("enqueuePackets() call on not ChildFFMpegLoader.");
}
void saveDecodedSamples(QByteArray *samples, int64 *samplesCount); void saveDecodedSamples(QByteArray *samples, int64 *samplesCount);
void takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount); void takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount);

View File

@ -35,38 +35,43 @@ Loaders::Loaders(QThread *thread) : _fromVideoNotify([this] { videoSoundAdded();
} }
void Loaders::feedFromVideo(VideoSoundPart &&part) { void Loaders::feedFromVideo(VideoSoundPart &&part) {
bool invoke = false; auto invoke = false;
{ {
QMutexLocker lock(&_fromVideoMutex); QMutexLocker lock(&_fromVideoMutex);
if (_fromVideoPlayId == part.videoPlayId) { _fromVideoQueues[part.audio].enqueue(FFMpeg::dataWrapFromPacket(*part.packet));
_fromVideoQueue.enqueue(FFMpeg::dataWrapFromPacket(*part.packet)); invoke = true;
invoke = true;
} else {
FFMpeg::freePacket(part.packet);
}
} }
if (invoke) { if (invoke) {
_fromVideoNotify.call(); _fromVideoNotify.call();
} }
} }
void Loaders::startFromVideo(uint64 videoPlayId) {
QMutexLocker lock(&_fromVideoMutex);
_fromVideoPlayId = videoPlayId;
clearFromVideoQueue();
}
void Loaders::stopFromVideo() {
startFromVideo(0);
}
void Loaders::videoSoundAdded() { void Loaders::videoSoundAdded() {
bool waitingAndAdded = false; auto waitingAndAdded = false;
auto queues = decltype(_fromVideoQueues)();
{ {
QMutexLocker lock(&_fromVideoMutex); QMutexLocker lock(&_fromVideoMutex);
if (_videoLoader && _videoLoader->playId() == _fromVideoPlayId && !_fromVideoQueue.isEmpty()) { queues = base::take(_fromVideoQueues);
_videoLoader->enqueuePackets(_fromVideoQueue); }
waitingAndAdded = _videoLoader->holdsSavedDecodedSamples(); auto tryLoader = [this](auto &audio, auto &loader, auto &it) {
if (audio == it.key() && loader) {
loader->enqueuePackets(it.value());
if (loader->holdsSavedDecodedSamples()) {
onLoad(audio);
}
return true;
}
return false;
};
for (auto i = queues.begin(), e = queues.end(); i != e; ++i) {
if (!tryLoader(_audio, _audioLoader, i)
&& !tryLoader(_song, _songLoader, i)
&& !tryLoader(_video, _videoLoader, i)) {
for (auto &packetData : i.value()) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
} }
} }
if (waitingAndAdded) { if (waitingAndAdded) {
@ -80,11 +85,13 @@ Loaders::~Loaders() {
} }
void Loaders::clearFromVideoQueue() { void Loaders::clearFromVideoQueue() {
auto queue = base::take(_fromVideoQueue); auto queues = base::take(_fromVideoQueues);
for (auto &packetData : queue) { for (auto &queue : queues) {
AVPacket packet; for (auto &packetData : queue) {
FFMpeg::packetFromDataWrap(packet, packetData); AVPacket packet;
FFMpeg::freePacket(&packet); FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
} }
} }
@ -316,22 +323,22 @@ AudioPlayerLoader *Loaders::setupLoader(const AudioMsgId &audio, SetupError &err
switch (audio.type()) { switch (audio.type()) {
case AudioMsgId::Type::Voice: _audio = audio; loader = &_audioLoader; break; case AudioMsgId::Type::Voice: _audio = audio; loader = &_audioLoader; break;
case AudioMsgId::Type::Song: _song = audio; loader = &_songLoader; break; case AudioMsgId::Type::Song: _song = audio; loader = &_songLoader; break;
case AudioMsgId::Type::Video: _video = audio; break; case AudioMsgId::Type::Video: _video = audio; loader = &_videoLoader; break;
} }
if (audio.type() == AudioMsgId::Type::Video) { if (audio.playId()) {
if (!track->videoData) { if (!track->videoData) {
clear(audio.type());
track->state.state = State::StoppedAtError; track->state.state = State::StoppedAtError;
emit error(audio); emit error(audio);
LOG(("Audio Error: video sound data not ready")); LOG(("Audio Error: video sound data not ready"));
return nullptr; return nullptr;
} }
_videoLoader = std::make_unique<ChildFFMpegLoader>(track->videoPlayId, std::move(track->videoData)); *loader = std::make_unique<ChildFFMpegLoader>(std::move(track->videoData));
l = _videoLoader.get();
} else { } else {
*loader = std::make_unique<FFMpegLoader>(track->file, track->data, base::byte_vector()); *loader = std::make_unique<FFMpegLoader>(track->file, track->data, base::byte_vector());
l = loader->get();
} }
l = loader->get();
if (!l->open(position)) { if (!l->open(position)) {
track->state.state = State::StoppedAtStart; track->state.state = State::StoppedAtStart;

View File

@ -35,8 +35,6 @@ class Loaders : public QObject {
public: public:
Loaders(QThread *thread); Loaders(QThread *thread);
void startFromVideo(uint64 videoPlayId);
void stopFromVideo();
void feedFromVideo(VideoSoundPart &&part); void feedFromVideo(VideoSoundPart &&part);
~Loaders(); ~Loaders();
@ -58,11 +56,10 @@ private:
AudioMsgId _audio, _song, _video; AudioMsgId _audio, _song, _video;
std::unique_ptr<AudioPlayerLoader> _audioLoader; std::unique_ptr<AudioPlayerLoader> _audioLoader;
std::unique_ptr<AudioPlayerLoader> _songLoader; std::unique_ptr<AudioPlayerLoader> _songLoader;
std::unique_ptr<ChildFFMpegLoader> _videoLoader; std::unique_ptr<AudioPlayerLoader> _videoLoader;
QMutex _fromVideoMutex; QMutex _fromVideoMutex;
uint64 _fromVideoPlayId; QMap<AudioMsgId, QQueue<FFMpeg::AVPacketDataWrap>> _fromVideoQueues;
QQueue<FFMpeg::AVPacketDataWrap> _fromVideoQueue;
SingleQueuedInvokation _fromVideoNotify; SingleQueuedInvokation _fromVideoNotify;
void emitError(AudioMsgId::Type type); void emitError(AudioMsgId::Type type);

View File

@ -32,8 +32,7 @@ VideoSoundData::~VideoSoundData() {
} }
} }
ChildFFMpegLoader::ChildFFMpegLoader(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray(), base::byte_vector()) ChildFFMpegLoader::ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray(), base::byte_vector())
, _videoPlayId(videoPlayId)
, _parentData(std::move(data)) { , _parentData(std::move(data)) {
_frame = av_frame_alloc(); _frame = av_frame_alloc();
} }

View File

@ -35,13 +35,14 @@ extern "C" {
struct VideoSoundData { struct VideoSoundData {
AVCodecContext *context = nullptr; AVCodecContext *context = nullptr;
int32 frequency = Media::Player::kDefaultFrequency; int32 frequency = Media::Player::kDefaultFrequency;
TimeMs length = 0; int64 length = 0;
~VideoSoundData(); ~VideoSoundData();
}; };
struct VideoSoundPart { struct VideoSoundPart {
AVPacket *packet = nullptr; AVPacket *packet = nullptr;
uint64 videoPlayId = 0; AudioMsgId audio;
uint32 playId = 0;
}; };
namespace FFMpeg { namespace FFMpeg {
@ -82,7 +83,7 @@ inline void freePacket(AVPacket *packet) {
class ChildFFMpegLoader : public AudioPlayerLoader { class ChildFFMpegLoader : public AudioPlayerLoader {
public: public:
ChildFFMpegLoader(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data); ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data);
bool open(qint64 &position) override; bool open(qint64 &position) override;
@ -103,11 +104,8 @@ public:
} }
ReadResult readMore(QByteArray &result, int64 &samplesAdded) override; ReadResult readMore(QByteArray &result, int64 &samplesAdded) override;
void enqueuePackets(QQueue<FFMpeg::AVPacketDataWrap> &packets); void enqueuePackets(QQueue<FFMpeg::AVPacketDataWrap> &packets) override;
uint64 playId() const {
return _videoPlayId;
}
bool eofReached() const { bool eofReached() const {
return _eofReached; return _eofReached;
} }
@ -126,7 +124,6 @@ private:
int32 _maxResampleSamples = 1024; int32 _maxResampleSamples = 1024;
uint8_t **_dstSamplesData = nullptr; uint8_t **_dstSamplesData = nullptr;
uint64 _videoPlayId = 0;
std::unique_ptr<VideoSoundData> _parentData; std::unique_ptr<VideoSoundData> _parentData;
AVSampleFormat _inputFormat; AVSampleFormat _inputFormat;
AVFrame *_frame = nullptr; AVFrame *_frame = nullptr;

View File

@ -56,8 +56,8 @@ bool isAlignedImage(const QImage &image) {
} // namespace } // namespace
FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId) : ReaderImplementation(location, data) FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data, const AudioMsgId &audio) : ReaderImplementation(location, data)
-, _playId(playId) {
+, _audioMsgId(audio) {
 _frame = av_frame_alloc();
 av_init_packet(&_packetNull);
 _packetNull.data = nullptr;
@@ -186,7 +186,7 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(Time
 }
 // sync by audio stream
-auto correctMs = (frameMs >= 0) ? Player::mixer()->getVideoCorrectedTime(_playId, frameMs, systemMs) : frameMs;
+auto correctMs = (frameMs >= 0) ? Player::mixer()->getVideoCorrectedTime(_audioMsgId, frameMs, systemMs) : frameMs;
 if (!_frameRead) {
 auto readResult = readNextFrame();
 if (readResult != ReadResult::Success) {
@@ -220,18 +220,18 @@ TimeMs FFMpegReaderImplementation::durationMs() const {
 void FFMpegReaderImplementation::pauseAudio() {
 if (_audioStreamId >= 0) {
-Player::mixer()->pauseFromVideo(_playId);
+Player::mixer()->pause(_audioMsgId, true);
 }
 }
 void FFMpegReaderImplementation::resumeAudio() {
 if (_audioStreamId >= 0) {
-Player::mixer()->resumeFromVideo(_playId);
+Player::mixer()->resume(_audioMsgId, true);
 }
 }
 bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
-t_assert(_frameRead);
+Expects(_frameRead);
 _frameRead = false;
 if (!_width || !_height) {
@@ -371,7 +371,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
 if (_mode == Mode::Inspecting) {
 _hasAudioStream = (_audioStreamId >= 0);
 _audioStreamId = -1;
-} else if (_mode == Mode::Silent || !_playId) {
+} else if (_mode == Mode::Silent || !_audioMsgId.playId()) {
 _audioStreamId = -1;
 }
@@ -427,7 +427,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
 if (_audioStreamId >= 0) {
 auto position = (positionMs * soundData->frequency) / 1000LL;
-Player::mixer()->initFromVideo(_playId, std::move(soundData), position);
+Player::mixer()->play(_audioMsgId, std::move(soundData), position);
 }
 if (readResult == PacketResult::Ok) {
@@ -481,7 +481,7 @@ QString FFMpegReaderImplementation::logData() const {
 FFMpegReaderImplementation::~FFMpegReaderImplementation() {
 if (_audioStreamId >= 0) {
-Player::mixer()->stopFromVideo(_playId);
+Player::mixer()->stop(_audioMsgId);
 }
 clearPacketQueue();
@@ -517,7 +517,7 @@ FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(
 // queue terminating packet to audio player
 VideoSoundPart part;
 part.packet = &_packetNull;
-part.videoPlayId = _playId;
+part.audio = _audioMsgId;
 Player::mixer()->feedFromVideo(std::move(part));
 }
 return PacketResult::EndOfFile;
@@ -543,7 +543,7 @@ void FFMpegReaderImplementation::processPacket(AVPacket *packet) {
 // queue packet to audio player
 VideoSoundPart part;
 part.packet = packet;
-part.videoPlayId = _playId;
+part.audio = _audioMsgId;
 Player::mixer()->feedFromVideo(std::move(part));
 }
 } else {

View File

@@ -35,7 +35,7 @@ namespace internal {
 class FFMpegReaderImplementation : public ReaderImplementation {
 public:
-FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId);
+FFMpegReaderImplementation(FileLocation *location, QByteArray *data, const AudioMsgId &audio);
 ReadResult readFramesTill(TimeMs frameMs, TimeMs systemMs) override;
@@ -110,7 +110,7 @@ private:
 bool _hasAudioStream = false;
 int _audioStreamId = -1;
-uint64 _playId = 0;
+AudioMsgId _audioMsgId;
 TimeMs _lastReadVideoMs = 0;
 TimeMs _lastReadAudioMs = 0;

View File

@@ -91,11 +91,22 @@ QPixmap PrepareFrame(const FrameRequest &request, const QImage &original, bool h
 } // namespace
-Reader::Reader(const FileLocation &location, const QByteArray &data, Callback &&callback, Mode mode, int64 seekMs)
+Reader::Reader(const QString &filepath, Callback &&callback, Mode mode, int64 seekMs)
 : _callback(std::move(callback))
 , _mode(mode)
-, _playId(rand_value<uint64>())
 , _seekPositionMs(seekMs) {
+init(FileLocation(filepath), QByteArray());
+}
+Reader::Reader(gsl::not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode, int64 seekMs)
+: _callback(std::move(callback))
+, _mode(mode)
+, _audioMsgId(document, msgId, (mode == Mode::Video) ? rand_value<uint32>() : 0)
+, _seekPositionMs(seekMs) {
+init(document->location(), document->data());
+}
+void Reader::init(const FileLocation &location, const QByteArray &data) {
 if (threads.size() < ClipThreadsCount) {
 _threadIndex = threads.size();
 threads.push_back(new QThread());
@@ -338,7 +349,7 @@ class ReaderPrivate {
 public:
 ReaderPrivate(Reader *reader, const FileLocation &location, const QByteArray &data) : _interface(reader)
 , _mode(reader->mode())
-, _playId(reader->playId())
+, _audioMsgId(reader->audioMsgId())
 , _seekPositionMs(reader->seekPositionMs())
 , _data(data) {
 if (_data.isEmpty()) {
@@ -361,9 +372,8 @@ public:
 // If seek was done to the end: try to read the first frame,
 // get the frame size and return a black frame with that size.
-auto firstFramePlayId = 0LL;
-auto firstFramePositionMs = 0LL;
-auto reader = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, firstFramePlayId);
+auto firstFramePositionMs = TimeMs(0);
+auto reader = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, AudioMsgId());
 if (reader->start(internal::ReaderImplementation::Mode::Normal, firstFramePositionMs)) {
 auto firstFrameReadResult = reader->readFramesTill(-1, ms);
 if (firstFrameReadResult == internal::ReaderImplementation::ReadResult::Success) {
@@ -470,7 +480,7 @@ public:
 }
 }
-_implementation = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, _playId);
+_implementation = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, _audioMsgId);
 // _implementation = new QtGifReaderImplementation(_location, &_data);
 auto implementationMode = [this]() {
@@ -532,7 +542,7 @@ private:
 Reader *_interface;
 State _state = State::Reading;
 Reader::Mode _mode;
-uint64 _playId;
+AudioMsgId _audioMsgId;
 TimeMs _seekPositionMs = 0;
 QByteArray _data;
@@ -844,9 +854,8 @@ FileLoadTask::Video PrepareForSending(const QString &fname, const QByteArray &da
 auto localLocation = FileLocation(fname);
 auto localData = QByteArray(data);
-auto playId = 0ULL;
 auto seekPositionMs = 0LL;
-auto reader = std::make_unique<internal::FFMpegReaderImplementation>(&localLocation, &localData, playId);
+auto reader = std::make_unique<internal::FFMpegReaderImplementation>(&localLocation, &localData, AudioMsgId());
 if (reader->start(internal::ReaderImplementation::Mode::Inspecting, seekPositionMs)) {
 auto durationMs = reader->durationMs();
 if (durationMs > 0) {

View File

@@ -61,7 +61,9 @@ public:
 Video,
 };
-Reader(const FileLocation &location, const QByteArray &data, Callback &&callback, Mode mode = Mode::Gif, TimeMs seekMs = 0);
+Reader(const QString &filepath, Callback &&callback, Mode mode = Mode::Gif, TimeMs seekMs = 0);
+Reader(gsl::not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode = Mode::Gif, TimeMs seekMs = 0);
 static void callback(Reader *reader, int threadIndex, Notification notification); // reader can be deleted
 void setAutoplay() {
@@ -71,8 +73,8 @@ public:
 return _autoplay;
 }
-uint64 playId() const {
-return _playId;
+AudioMsgId audioMsgId() const {
+return _audioMsgId;
 }
 TimeMs seekPositionMs() const {
 return _seekPositionMs;
@@ -126,13 +128,14 @@ public:
 ~Reader();
 private:
+void init(const FileLocation &location, const QByteArray &data);
 Callback _callback;
 Mode _mode;
 State _state = State::Reading;
-uint64 _playId;
+AudioMsgId _audioMsgId;
 bool _hasAudio = false;
 TimeMs _durationMs = 0;
 TimeMs _seekPositionMs = 0;
@@ -157,9 +160,9 @@ private:
 TimeMs positionMs = 0;
 };
 mutable Frame _frames[3];
-Frame *frameToShow(int *index = 0) const; // 0 means not ready
-Frame *frameToWrite(int *index = 0) const; // 0 means not ready
-Frame *frameToWriteNext(bool check, int *index = 0) const;
+Frame *frameToShow(int *index = nullptr) const; // 0 means not ready
+Frame *frameToWrite(int *index = nullptr) const; // 0 means not ready
+Frame *frameToWriteNext(bool check, int *index = nullptr) const;
 void moveToNextShow() const;
 void moveToNextWrite() const;
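
For orientation, a minimal usage sketch of the new document-based constructor declared above, under the assumption that Media::Clip::MakeReader forwards its arguments to this constructor and returns a Media::Clip::ReaderPointer. The document, contextMsgId and callback body below are illustrative placeholders, not code from this commit.

    // Sketch only: open a video / round-video document for playback.
    // Mode::Video makes the Reader build an AudioMsgId with a non-zero
    // playId, so the mixer can tell this playback apart from any other
    // playback of the same document.
    Media::Clip::ReaderPointer reader = Media::Clip::MakeReader(
        document,      // gsl::not_null<DocumentData*>
        contextMsgId,  // FullMsgId of the message that owns the media
        [](Media::Clip::Notification notification) {
            // react to Reinit / Repaint notifications from the clip thread
        },
        Media::Clip::Reader::Mode::Video);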

View File

@@ -113,6 +113,7 @@ CoverWidget::CoverWidget(QWidget *parent) : TWidget(parent)
 updateVolumeToggleIcon();
 _volumeToggle->setClickedCallback([this]() {
 Global::SetSongVolume((Global::SongVolume() > 0) ? 0. : Global::RememberedSongVolume());
+mixer()->setSongVolume(Global::SongVolume());
 Global::RefSongVolumeChanged().notify();
 });
 subscribe(Global::RefSongVolumeChanged(), [this] { updateVolumeToggleIcon(); });

View File

@@ -176,8 +176,8 @@ void Instance::play() {
 if (state.id) {
 if (IsStopped(state.state)) {
 mixer()->play(state.id);
-} else if (IsPaused(state.state) || state.state == State::Pausing) {
-mixer()->pauseresume(AudioMsgId::Type::Song);
+} else {
+mixer()->resume(state.id);
 }
 } else if (_current) {
 mixer()->play(_current);
@@ -198,16 +198,15 @@ void Instance::play(const AudioMsgId &audioId) {
 void Instance::pause(AudioMsgId::Type type) {
 auto state = mixer()->currentState(type);
 if (state.id) {
-if (!IsStopped(state.state)) {
-if (state.state == State::Starting || state.state == State::Resuming || state.state == State::Playing || state.state == State::Finishing) {
-mixer()->pauseresume(type);
-}
-}
+mixer()->pause(state.id);
 }
 }
 void Instance::stop() {
-mixer()->stop(AudioMsgId::Type::Song);
+auto state = mixer()->currentState(AudioMsgId::Type::Song);
+if (state.id) {
+mixer()->stop(state.id);
+}
 }
 void Instance::playPause() {
@@ -215,8 +214,10 @@ void Instance::playPause() {
 if (state.id) {
 if (IsStopped(state.state)) {
 mixer()->play(state.id);
+} else if (IsPaused(state.state) || state.state == State::Pausing) {
+mixer()->resume(state.id);
 } else {
-mixer()->pauseresume(AudioMsgId::Type::Song);
+mixer()->pause(state.id);
 }
 } else if (_current) {
 mixer()->play(_current);
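
Taken together, the hunks above replace the old pauseresume() toggle with explicit play / resume / pause calls keyed by a concrete AudioMsgId. A rough sketch of the resulting toggle pattern, assuming a Song-type query as in Instance::playPause(); this is an illustration, not literal code from the commit.

    // Sketch: decide between play, resume and pause from the mixer state.
    auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
    if (state.id) {
        if (Media::Player::IsStopped(state.state)) {
            Media::Player::mixer()->play(state.id);    // nothing active: start it
        } else if (Media::Player::IsPaused(state.state)
            || state.state == Media::Player::State::Pausing) {
            Media::Player::mixer()->resume(state.id);  // paused: continue
        } else {
            Media::Player::mixer()->pause(state.id);   // playing: pause
        }
    }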

View File

@@ -74,6 +74,7 @@ void VolumeController::setVolume(float64 volume) {
 void VolumeController::applyVolumeChange(float64 volume) {
 if (volume != Global::SongVolume()) {
 Global::SetSongVolume(volume);
+mixer()->setSongVolume(Global::SongVolume());
 Global::RefSongVolumeChanged().notify();
 }
 }

View File

@@ -121,6 +121,7 @@ Widget::Widget(QWidget *parent) : TWidget(parent)
 updateVolumeToggleIcon();
 _volumeToggle->setClickedCallback([this] {
 Global::SetSongVolume((Global::SongVolume() > 0) ? 0. : Global::RememberedSongVolume());
+mixer()->setSongVolume(Global::SongVolume());
 Global::RefSongVolumeChanged().notify();
 });
 subscribe(Global::RefSongVolumeChanged(), [this] { updateVolumeToggleIcon(); });

View File

@@ -1257,7 +1257,7 @@ void MediaView::displayDocument(DocumentData *doc, HistoryItem *item) { // empty
 } else if (_doc->isTheme()) {
 initThemePreview();
 } else {
-const FileLocation &location(_doc->location(true));
+auto &location = _doc->location(true);
 if (location.accessEnable()) {
 if (QImageReader(location.name()).canRead()) {
 _current = App::pixmapFromImageInPlace(App::readImage(location.name(), 0, false));
@@ -1441,7 +1441,7 @@ void MediaView::createClipReader() {
 _current = _doc->thumb->pixNoCache(_doc->thumb->width(), _doc->thumb->height(), videoThumbOptions(), st::mediaviewFileIconSize, st::mediaviewFileIconSize);
 }
 auto mode = (_doc->isVideo() || _doc->isRoundVideo()) ? Media::Clip::Reader::Mode::Video : Media::Clip::Reader::Mode::Gif;
-_gif = std::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), [this](Media::Clip::Notification notification) {
+_gif = Media::Clip::MakeReader(_doc, FullMsgId(_channel, _msgid), [this](Media::Clip::Notification notification) {
 clipCallback(notification);
 }, mode);
@@ -1557,7 +1557,7 @@ void MediaView::restartVideoAtSeekPosition(TimeMs positionMs) {
 auto rounding = (_doc && _doc->isRoundVideo()) ? ImageRoundRadius::Ellipse : ImageRoundRadius::None;
 _current = _gif->current(_gif->width() / cIntRetinaFactor(), _gif->height() / cIntRetinaFactor(), _gif->width() / cIntRetinaFactor(), _gif->height() / cIntRetinaFactor(), rounding, ImageRoundCorner::All, getms());
 }
-_gif = std::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), [this](Media::Clip::Notification notification) {
+_gif = Media::Clip::MakeReader(_doc, FullMsgId(_channel, _msgid), [this](Media::Clip::Notification notification) {
 clipCallback(notification);
 }, Media::Clip::Reader::Mode::Video, positionMs);
@@ -1607,16 +1607,17 @@ void MediaView::onVideoToggleFullScreen() {
 }
 void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
-if (audioId.type() != AudioMsgId::Type::Video || !_gif) {
+if (!_gif || _gif->audioMsgId() != audioId) {
 return;
 }
-auto state = Media::Player::mixer()->currentVideoState(_gif->playId());
-if (state.length) {
-updateVideoPlaybackState(state);
-}
-AuthSession::Current().data().setLastTimeVideoPlayedAt(getms(true));
+auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Video);
+if (state.id == _gif->audioMsgId()) {
+if (state.length) {
+updateVideoPlaybackState(state);
+}
+AuthSession::Current().data().setLastTimeVideoPlayedAt(getms(true));
+}
 }
 void MediaView::updateVideoPlaybackState(const Media::Player::TrackState &state) {

View File

@@ -258,7 +258,7 @@ private:
 bool _pressed = false;
 int32 _dragging = 0;
 QPixmap _current;
-std::unique_ptr<Media::Clip::Reader> _gif;
+Media::Clip::ReaderPointer _gif;
 int32 _full = -1; // -1 - thumb, 0 - medium, 1 - full
 // Video without audio stream playback information.

View File

@@ -1203,7 +1203,11 @@ void DocumentOpenClickHandler::doOpen(DocumentData *data, HistoryItem *context,
 if (playVoice) {
 auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
 if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
-Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice);
+if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
+Media::Player::mixer()->resume(state.id);
+} else {
+Media::Player::mixer()->pause(state.id);
+}
 } else {
 auto audio = AudioMsgId(data, msgId);
 Media::Player::mixer()->play(audio);
@@ -1215,7 +1219,11 @@
 } else if (playMusic) {
 auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
 if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
-Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song);
+if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
+Media::Player::mixer()->resume(state.id);
+} else {
+Media::Player::mixer()->pause(state.id);
+}
 } else {
 auto song = AudioMsgId(data, msgId);
 Media::Player::mixer()->play(song);
@@ -1503,7 +1511,11 @@ void DocumentData::performActionOnLoad() {
 if (loaded()) {
 auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
 if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
-Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice);
+if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
+Media::Player::mixer()->resume(state.id);
+} else {
+Media::Player::mixer()->pause(state.id);
+}
 } else if (Media::Player::IsStopped(state.state)) {
 Media::Player::mixer()->play(AudioMsgId(this, _actionOnLoadMsgId));
 if (App::main()) App::main()->mediaMarkRead(this);
@@ -1513,7 +1525,11 @@
 if (loaded()) {
 auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
 if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
-Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song);
+if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
+Media::Player::mixer()->resume(state.id);
+} else {
+Media::Player::mixer()->pause(state.id);
+}
 } else if (Media::Player::IsStopped(state.state)) {
 auto song = AudioMsgId(this, _actionOnLoadMsgId);
 Media::Player::mixer()->play(song);

View File

@@ -1337,16 +1337,10 @@ public:
 Video,
 };
-AudioMsgId() {
-}
-AudioMsgId(DocumentData *audio, const FullMsgId &msgId) : _audio(audio), _contextId(msgId) {
+AudioMsgId() = default;
+AudioMsgId(DocumentData *audio, const FullMsgId &msgId, uint32 playId = 0) : _audio(audio), _contextId(msgId), _playId(playId) {
 setTypeFromAudio();
 }
-AudioMsgId(DocumentData *audio, ChannelId channelId, MsgId msgId) : _audio(audio), _contextId(channelId, msgId) {
-setTypeFromAudio();
-}
-AudioMsgId(Type type) : _type(type) {
-}
 Type type() const {
 return _type;
@@ -1357,14 +1351,17 @@ public:
 FullMsgId contextId() const {
 return _contextId;
 }
+uint32 playId() const {
+return _playId;
+}
 explicit operator bool() const {
-return _audio || (_type == Type::Video);
+return _audio != nullptr;
 }
 private:
 void setTypeFromAudio() {
-if (_audio->voice()) {
+if (_audio->voice() || _audio->isRoundVideo()) {
 _type = Type::Voice;
 } else if (_audio->isVideo()) {
 _type = Type::Video;
@@ -1378,14 +1375,24 @@ private:
 DocumentData *_audio = nullptr;
 Type _type = Type::Unknown;
 FullMsgId _contextId;
+uint32 _playId = 0;
 };
 inline bool operator<(const AudioMsgId &a, const AudioMsgId &b) {
-return quintptr(a.audio()) < quintptr(b.audio()) || (quintptr(a.audio()) == quintptr(b.audio()) && a.contextId() < b.contextId());
+if (quintptr(a.audio()) < quintptr(b.audio())) {
+return true;
+} else if (quintptr(b.audio()) < quintptr(a.audio())) {
+return false;
+} else if (a.contextId() < b.contextId()) {
+return true;
+} else if (b.contextId() < a.contextId()) {
+return false;
+}
+return (a.playId() < b.playId());
 }
 inline bool operator==(const AudioMsgId &a, const AudioMsgId &b) {
-return a.audio() == b.audio() && a.contextId() == b.contextId();
+return a.audio() == b.audio() && a.contextId() == b.contextId() && a.playId() == b.playId();
 }
 inline bool operator!=(const AudioMsgId &a, const AudioMsgId &b) {
 return !(a == b);
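
As a closing illustration of the comparison semantics above: two separate playbacks of the same document in the same message now get distinct ids once a non-zero playId is assigned. The document and itemId names below are placeholders, not identifiers from this commit.

    // Sketch: same document and message, two different playbacks.
    auto first = AudioMsgId(document, itemId, 1);   // playId 1
    auto second = AudioMsgId(document, itemId, 2);  // playId 2
    // operator== now also compares playId, so (first == second) is false,
    // and operator< falls back to playId after audio and contextId tie,
    // which keeps AudioMsgId usable as an ordered map key.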