Play video messages as Type::Voice.

Use AudioMsgId instead of videoPlayId.
Any audio track now can be a child loader track of some video clip.
Use Type::Voice instead of Type::Video for round video messages.
Video messages play / pause / resume the same way as voice messages.
This commit is contained in:
John Preston 2017-05-18 23:18:59 +03:00
parent 1e6d4d6b41
commit b9119e5ef6
27 changed files with 466 additions and 440 deletions

View File

@ -276,9 +276,6 @@ mediaPlayerSuppressDuration: 150;
botDescSkip: 8px;
suppressAll: 0.2;
suppressSong: 0.05;
inlineResultsLeft: 11px;
inlineResultsSkip: 3px;
inlineMediaHeight: 96px;

View File

@ -145,7 +145,7 @@ void SendFilesBox::prepareGifPreview() {
return _animated;
};
if (createGifPreview()) {
_gifPreview = Media::Clip::MakeReader(FileLocation(_files.front()), QByteArray(), [this](Media::Clip::Notification notification) {
_gifPreview = Media::Clip::MakeReader(_files.front(), [this](Media::Clip::Notification notification) {
clipCallback(notification);
});
if (_gifPreview) _gifPreview->setAutoplay();
@ -575,7 +575,7 @@ void EditCaptionBox::prepareGifPreview(DocumentData *document) {
};
auto createGifPreviewResult = createGifPreview(); // Clang freeze workaround.
if (createGifPreviewResult) {
_gifPreview = Media::Clip::MakeReader(document->location(), document->data(), [this](Media::Clip::Notification notification) {
_gifPreview = Media::Clip::MakeReader(document, _msgId, [this](Media::Clip::Notification notification) {
clipCallback(notification);
});
if (_gifPreview) _gifPreview->setAutoplay();

View File

@ -29,6 +29,7 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "ui/effects/ripple_animation.h"
#include "storage/file_upload.h"
#include "auth_session.h"
#include "media/media_audio.h"
#include "messenger.h"
namespace {
@ -947,7 +948,7 @@ void HistoryItem::clipCallback(Media::Clip::Notification notification) {
return;
}
auto reader = media ? media->getClipReader() : nullptr;
auto reader = media->getClipReader();
if (!reader) {
return;
}
@ -983,6 +984,32 @@ void HistoryItem::clipCallback(Media::Clip::Notification notification) {
}
}
void HistoryItem::audioTrackUpdated() {
	// Sync a round video clip with the state of its voice audio track.
	auto media = getMedia();
	auto reader = media ? media->getClipReader() : nullptr;
	if (!reader || reader->mode() != Media::Clip::Reader::Mode::Video) {
		return;
	}

	const auto audioId = reader->audioMsgId();
	const auto state = Media::Player::mixer()->currentState(audioId.type());
	const auto trackGone = (state.id != audioId)
		|| Media::Player::IsStopped(state.state)
		|| (state.state == Media::Player::State::Finishing);
	if (trackGone) {
		// The mixer no longer plays this track — stop the clip as well.
		media->stopInline();
		return;
	}

	// Keep the clip's paused flag equal to the audio paused state,
	// toggling it only when the two disagree.
	const auto audioPaused = Media::Player::IsPaused(state.state)
		|| (state.state == Media::Player::State::Pausing);
	if (audioPaused != reader->videoPaused()) {
		reader->pauseResumeVideo();
	}
}
void HistoryItem::recountDisplayDate() {
bool displayingDate = ([this]() {
if (isEmpty()) {

View File

@ -851,6 +851,7 @@ public:
}
void clipCallback(Media::Clip::Notification notification);
void audioTrackUpdated();
~HistoryItem();

View File

@ -1840,9 +1840,6 @@ int HistoryGif::resizeGetHeight(int width) {
auto roundCorners = (isRound || inWebPage) ? ImageRoundCorner::All : ((isBubbleTop() ? (ImageRoundCorner::TopLeft | ImageRoundCorner::TopRight) : ImageRoundCorner::None)
| ((isBubbleBottom() && _caption.isEmpty()) ? (ImageRoundCorner::BottomLeft | ImageRoundCorner::BottomRight) : ImageRoundCorner::None));
_gif->start(_thumbw, _thumbh, _width, _height, roundRadius, roundCorners);
if (isRound) {
Media::Player::mixer()->setVideoVolume(1.);
}
}
} else {
_width = qMax(_width, gifMaxStatusWidth(_data) + 2 * int32(st::msgDateImgDelta + st::msgDateImgPadding.x()));
@ -2325,18 +2322,20 @@ void HistoryGif::updateStatusText() const {
if (_gif && _gif->mode() == Media::Clip::Reader::Mode::Video) {
statusSize = -1 - _data->duration();
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Video);
if (state.length) {
auto position = int64(0);
if (!Media::Player::IsStopped(state.state) && state.state != Media::Player::State::Finishing) {
position = state.position;
} else if (state.state == Media::Player::State::StoppedAtEnd) {
position = state.length;
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == _gif->audioMsgId()) {
if (state.length) {
auto position = int64(0);
if (!Media::Player::IsStopped(state.state) && state.state != Media::Player::State::Finishing) {
position = state.position;
} else if (state.state == Media::Player::State::StoppedAtEnd) {
position = state.length;
}
accumulate_max(statusSize, -1 - int((state.length - position) / state.frequency + 1));
}
if (_roundPlayback) {
_roundPlayback->updateState(state);
}
accumulate_max(statusSize, -1 - int((state.length - position) / state.frequency + 1));
}
if (_roundPlayback) {
_roundPlayback->updateState(state);
}
}
} else {
@ -2395,8 +2394,14 @@ bool HistoryGif::playInline(bool autoplay) {
if (_data->isRoundVideo() && _gif) {
// Stop autoplayed silent video when we start playback by click.
// Stop finished video message when autoplay starts.
if ((!autoplay && _gif->mode() == Mode::Gif)
|| (autoplay && _gif->mode() == Mode::Video && _gif->state() == Media::Clip::State::Finished)) {
if (!autoplay) {
if (_gif->mode() == Mode::Gif) {
stopInline();
} else {
_gif->pauseResumeVideo();
return true;
}
} else if (autoplay && _gif->mode() == Mode::Video && _gif->state() == Media::Clip::State::Finished) {
stopInline();
}
}
@ -2407,7 +2412,7 @@ bool HistoryGif::playInline(bool autoplay) {
App::stopGifItems();
}
auto mode = (!autoplay && _data->isRoundVideo()) ? Mode::Video : Mode::Gif;
setClipReader(Media::Clip::MakeReader(_data->location(), _data->data(), [this](Media::Clip::Notification notification) {
setClipReader(Media::Clip::MakeReader(_data, _parent->fullId(), [this](Media::Clip::Notification notification) {
_parent->clipCallback(notification);
}, mode));
if (mode == Mode::Video) {

View File

@ -139,7 +139,7 @@ void Gif::paint(Painter &p, const QRect &clip, const PaintContext *context) cons
bool loaded = document->loaded(), loading = document->loading(), displayLoading = document->displayLoading();
if (loaded && !_gif && !_gif.isBad()) {
auto that = const_cast<Gif*>(this);
that->_gif = Media::Clip::MakeReader(document->location(), document->data(), [that](Media::Clip::Notification notification) {
that->_gif = Media::Clip::MakeReader(document, FullMsgId(), [that](Media::Clip::Notification notification) {
that->clipCallback(notification);
});
if (_gif) _gif->setAutoplay();
@ -1191,7 +1191,7 @@ void Game::paint(Painter &p, const QRect &clip, const PaintContext *context) con
bool loaded = document->loaded(), loading = document->loading(), displayLoading = document->displayLoading();
if (loaded && !_gif && !_gif.isBad()) {
auto that = const_cast<Game*>(this);
that->_gif = Media::Clip::MakeReader(document->location(), document->data(), [that](Media::Clip::Notification notification) {
that->_gif = Media::Clip::MakeReader(document, FullMsgId(), [that](Media::Clip::Notification notification) {
that->clipCallback(notification);
});
if (_gif) _gif->setAutoplay();

View File

@ -920,7 +920,7 @@ QPixmap MediaPreviewWidget::currentImage() const {
if (_document->loaded()) {
if (!_gif && !_gif.isBad()) {
auto that = const_cast<MediaPreviewWidget*>(this);
that->_gif = Media::Clip::MakeReader(_document->location(), _document->data(), [this, that](Media::Clip::Notification notification) {
that->_gif = Media::Clip::MakeReader(_document, FullMsgId(), [this, that](Media::Clip::Notification notification) {
that->clipCallback(notification);
});
if (_gif) _gif->setAutoplay();

View File

@ -1553,6 +1553,7 @@ void MainWidget::handleAudioUpdate(const AudioMsgId &audioId) {
if (auto item = App::histItemById(audioId.contextId())) {
Ui::repaintHistoryItem(item);
item->audioTrackUpdated();
}
if (auto items = InlineBots::Layout::documentItems()) {
for (auto item : items->value(audioId.audio())) {

View File

@ -42,8 +42,11 @@ QMutex AudioMutex;
ALCdevice *AudioDevice = nullptr;
ALCcontext *AudioContext = nullptr;
auto suppressAllGain = 1.;
auto suppressSongGain = 1.;
constexpr auto kSuppressRatioAll = 0.2;
constexpr auto kSuppressRatioSong = 0.05;
auto VolumeMultiplierAll = 1.;
auto VolumeMultiplierSong = 1.;
} // namespace
@ -249,7 +252,7 @@ void StopDetachIfNotUsedSafe() {
namespace Player {
namespace {
constexpr auto kVideoVolumeRound = 10000;
constexpr auto kVolumeRound = 10000;
constexpr auto kPreloadSamples = 2LL * 48000; // preload next part if less than 2 seconds remains
constexpr auto kFadeDuration = TimeMs(500);
constexpr auto kCheckPlaybackPositionTimeout = TimeMs(100); // 100ms per check audio position
@ -264,11 +267,12 @@ base::Observable<AudioMsgId> &Updated() {
return UpdatedObservable;
}
// Thread: Any. Must be locked: AudioMutex.
float64 ComputeVolume(AudioMsgId::Type type) {
switch (type) {
case AudioMsgId::Type::Voice: return suppressAllGain;
case AudioMsgId::Type::Song: return suppressSongGain * Global::SongVolume();
case AudioMsgId::Type::Video: return suppressSongGain * mixer()->getVideoVolume();
case AudioMsgId::Type::Voice: return VolumeMultiplierAll;
case AudioMsgId::Type::Song: return VolumeMultiplierSong * mixer()->getSongVolume();
case AudioMsgId::Type::Video: return mixer()->getVideoVolume();
}
return 1.;
}
@ -349,7 +353,8 @@ void Mixer::Track::clear() {
}
videoData = nullptr;
videoPlayId = 0;
lastUpdateWhen = 0;
lastUpdateCorrectedMs = 0;
}
void Mixer::Track::started() {
@ -432,8 +437,11 @@ void Mixer::Track::resetStream() {
}
}
Mixer::Track::~Track() = default;
Mixer::Mixer()
: _videoVolume(kVideoVolumeRound)
: _volumeVideo(kVolumeRound)
, _volumeSong(kVolumeRound)
, _fader(new Fader(&_faderThread))
, _loader(new Loaders(&_loaderThread)) {
connect(this, SIGNAL(faderOnTimer()), _fader, SLOT(onTimer()), Qt::QueuedConnection);
@ -483,7 +491,7 @@ Mixer::~Mixer() {
}
void Mixer::onUpdated(const AudioMsgId &audio) {
if (audio.type() == AudioMsgId::Type::Video) {
if (audio.playId()) {
videoSoundProgress(audio);
}
Media::Player::Updated().notify(audio);
@ -491,15 +499,29 @@ void Mixer::onUpdated(const AudioMsgId &audio) {
void Mixer::onError(const AudioMsgId &audio) {
emit stoppedOnError(audio);
if (audio.type() == AudioMsgId::Type::Voice) {
emit unsuppressSong();
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
if (type == AudioMsgId::Type::Voice) {
if (auto current = trackForType(type)) {
if (current->state.id == audio) {
emit unsuppressSong();
}
}
}
}
void Mixer::onStopped(const AudioMsgId &audio) {
emit updated(audio);
if (audio.type() == AudioMsgId::Type::Voice) {
emit unsuppressSong();
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
if (type == AudioMsgId::Type::Voice) {
if (auto current = trackForType(type)) {
if (current->state.id == audio) {
emit unsuppressSong();
}
}
}
}
@ -591,6 +613,13 @@ bool Mixer::fadedStop(AudioMsgId::Type type, bool *fadedStart) {
}
void Mixer::play(const AudioMsgId &audio, int64 position) {
setSongVolume(Global::SongVolume());
play(audio, std::unique_ptr<VideoSoundData>(), position);
}
void Mixer::play(const AudioMsgId &audio, std::unique_ptr<VideoSoundData> videoData, int64 position) {
Expects(!videoData || audio.playId() != 0);
auto type = audio.type();
AudioMsgId stopped;
auto notLoadedYet = false;
@ -599,10 +628,31 @@ void Mixer::play(const AudioMsgId &audio, int64 position) {
Audio::AttachToDevice();
if (!AudioDevice) return;
bool fadedStart = false;
auto fadedStart = false;
auto current = trackForType(type);
if (!current) return;
if (type == AudioMsgId::Type::Video) {
auto pauseType = [this](AudioMsgId::Type type) {
auto current = trackForType(type);
switch (current->state.state) {
case State::Starting:
case State::Resuming:
case State::Playing: {
current->state.state = State::Pausing;
resetFadeStartPosition(type);
} break;
case State::Finishing: {
current->state.state = State::Pausing;
} break;
}
};
pauseType(AudioMsgId::Type::Song);
pauseType(AudioMsgId::Type::Voice);
}
if (current->state.id != audio) {
if (fadedStop(type, &fadedStart)) {
stopped = current->state.id;
@ -611,42 +661,49 @@ void Mixer::play(const AudioMsgId &audio, int64 position) {
emit loaderOnCancel(current->state.id);
emit faderOnTimer();
}
auto foundCurrent = currentIndex(type);
auto index = 0;
for (; index != kTogetherLimit; ++index) {
if (trackForType(type, index)->state.id == audio) {
*foundCurrent = index;
break;
}
}
if (index == kTogetherLimit && ++*foundCurrent >= kTogetherLimit) {
*foundCurrent -= kTogetherLimit;
}
current = trackForType(type);
}
current->state.id = audio;
current->file = audio.audio()->location(true);
current->data = audio.audio()->data();
if (current->file.isEmpty() && current->data.isEmpty()) {
notLoadedYet = true;
if (audio.type() == AudioMsgId::Type::Song) {
setStoppedState(current);
if (type == AudioMsgId::Type::Video) {
current->clear();
} else {
setStoppedState(current, State::StoppedAtError);
auto foundCurrent = currentIndex(type);
auto index = 0;
for (; index != kTogetherLimit; ++index) {
if (trackForType(type, index)->state.id == audio) {
*foundCurrent = index;
break;
}
}
if (index == kTogetherLimit && ++*foundCurrent >= kTogetherLimit) {
*foundCurrent -= kTogetherLimit;
}
current = trackForType(type);
}
}
current->state.id = audio;
current->lastUpdateWhen = 0;
current->lastUpdateCorrectedMs = 0;
if (videoData) {
current->videoData = std::move(videoData);
} else {
current->file = audio.audio()->location(true);
current->data = audio.audio()->data();
notLoadedYet = (current->file.isEmpty() && current->data.isEmpty());
}
if (notLoadedYet) {
auto newState = (type == AudioMsgId::Type::Song) ? State::Stopped : State::StoppedAtError;
setStoppedState(current, newState);
} else {
current->state.position = position;
current->state.state = fadedStart ? State::Starting : State::Playing;
current->state.state = current->videoData ? State::Paused : fadedStart ? State::Starting : State::Playing;
current->loading = true;
emit loaderOnStart(audio, position);
emit loaderOnStart(current->state.id, position);
if (type == AudioMsgId::Type::Voice) {
emit suppressSong();
}
}
}
if (notLoadedYet) {
if (audio.type() == AudioMsgId::Type::Song) {
if (type == AudioMsgId::Type::Song || type == AudioMsgId::Type::Video) {
DocumentOpenClickHandler::doOpen(audio.audio(), App::histItemById(audio.contextId()));
} else {
onError(audio);
@ -657,83 +714,58 @@ void Mixer::play(const AudioMsgId &audio, int64 position) {
}
}
void Mixer::initFromVideo(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data, int64 position) {
AudioMsgId stopped;
{
QMutexLocker lock(&AudioMutex);
// Pause current song.
auto songType = AudioMsgId::Type::Song;
auto currentSong = trackForType(songType);
switch (currentSong->state.state) {
case State::Starting:
case State::Resuming:
case State::Playing: {
currentSong->state.state = State::Pausing;
resetFadeStartPosition(songType);
} break;
case State::Finishing: {
currentSong->state.state = State::Pausing;
} break;
}
auto type = AudioMsgId::Type::Video;
auto current = trackForType(type);
t_assert(current != nullptr);
if (current->state.id) {
fadedStop(type);
stopped = current->state.id;
emit loaderOnCancel(current->state.id);
}
emit faderOnTimer();
current->clear();
current->state.id = AudioMsgId(AudioMsgId::Type::Video);
current->videoPlayId = videoPlayId;
current->videoData = std::move(data);
{
QMutexLocker videoLock(&_lastVideoMutex);
_lastVideoPlayId = current->videoPlayId;
_lastVideoPlaybackWhen = 0;
_lastVideoPlaybackCorrectedMs = 0;
}
_loader->startFromVideo(current->videoPlayId);
current->state.state = State::Paused;
current->loading = true;
emit loaderOnStart(current->state.id, position);
}
if (stopped) emit updated(stopped);
// Forwards a decoded audio part from the video player to the loaders
// object. Thread: Any — presumably the loader synchronizes internally;
// confirm against the Loaders implementation.
void Mixer::feedFromVideo(VideoSoundPart &&part) {
	_loader->feedFromVideo(std::move(part));
}
void Mixer::stopFromVideo(uint64 videoPlayId) {
// Maps a video frame timestamp onto the audio playback clock.
// Returns frameMs unchanged when this audio is not the active track
// of its type or no playback position update has been recorded yet.
TimeMs Mixer::getVideoCorrectedTime(const AudioMsgId &audio, TimeMs frameMs, TimeMs systemMs) {
	QMutexLocker lock(&AudioMutex);
	const auto track = trackForType(audio.type());
	if (!track || track->state.id != audio || track->lastUpdateWhen <= 0) {
		return frameMs;
	}
	// Extrapolate from the last corrected position by the wall-clock
	// time that has passed since that update was recorded.
	auto corrected = static_cast<TimeMs>(track->lastUpdateCorrectedMs);
	if (systemMs > track->lastUpdateWhen) {
		corrected += (systemMs - track->lastUpdateWhen);
	}
	return corrected;
}
// Records when and at which corrected position the audio track of a
// video was last observed playing, for getVideoCorrectedTime().
void Mixer::videoSoundProgress(const AudioMsgId &audio) {
	QMutexLocker lock(&AudioMutex);
	const auto track = trackForType(audio.type());
	const auto playingThisAudio = track
		&& track->state.length
		&& track->state.frequency
		&& (track->state.id == audio)
		&& (track->state.state == State::Playing);
	if (playingThisAudio) {
		// state.position is in samples; convert to milliseconds.
		track->lastUpdateWhen = getms();
		track->lastUpdateCorrectedMs = (track->state.position * 1000ULL) / track->state.frequency;
	}
}
// Checks whether an OpenAL playback error happened. Returns true when
// everything is fine; otherwise marks the current track of the given
// type as StoppedAtError, notifies listeners and returns false.
bool Mixer::checkCurrentALError(AudioMsgId::Type type) {
	if (!Audio::PlaybackErrorHappened()) return true;

	auto data = trackForType(type);
	if (data) {
		// Was "if (!data)": the branch dereferenced a null pointer in
		// both calls below and left errors on valid tracks unhandled.
		setStoppedState(data, State::StoppedAtError);
		onError(data->state.id);
	}
	return false;
}
void Mixer::pause(const AudioMsgId &audio, bool fast) {
AudioMsgId current;
{
QMutexLocker lock(&AudioMutex);
auto track = trackForType(AudioMsgId::Type::Video);
t_assert(track != nullptr);
if (track->videoPlayId != videoPlayId) {
return;
}
current = track->state.id;
fadedStop(AudioMsgId::Type::Video);
track->clear();
}
if (current) emit updated(current);
}
void Mixer::pauseFromVideo(uint64 videoPlayId) {
AudioMsgId current;
{
QMutexLocker lock(&AudioMutex);
auto type = AudioMsgId::Type::Video;
auto type = audio.type();
auto track = trackForType(type);
t_assert(track != nullptr);
if (track->videoPlayId != videoPlayId) {
if (!track || track->state.id != audio) {
return;
}
@ -742,41 +774,44 @@ void Mixer::pauseFromVideo(uint64 videoPlayId) {
case State::Starting:
case State::Resuming:
case State::Playing: {
track->state.state = State::Paused;
track->state.state = fast ? State::Paused : State::Pausing;
resetFadeStartPosition(type);
if (track->isStreamCreated()) {
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (!checkCurrentALError(type)) return;
if (state == AL_PLAYING) {
alSourcePause(track->stream.source);
if (!checkCurrentALError(type)) return;
}
if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
}
} break;
case State::Finishing: {
track->state.state = fast ? State::Paused : State::Pausing;
} break;
}
if (fast && track->isStreamCreated()) {
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (!checkCurrentALError(type)) return;
if (state == AL_PLAYING) {
alSourcePause(track->stream.source);
if (!checkCurrentALError(type)) return;
}
}
emit faderOnTimer();
QMutexLocker videoLock(&_lastVideoMutex);
if (_lastVideoPlayId == videoPlayId) {
_lastVideoPlaybackWhen = 0;
_lastVideoPlaybackCorrectedMs = 0;
}
track->lastUpdateWhen = 0;
track->lastUpdateCorrectedMs = 0;
}
if (current) emit updated(current);
}
void Mixer::resumeFromVideo(uint64 videoPlayId) {
void Mixer::resume(const AudioMsgId &audio, bool fast) {
AudioMsgId current;
{
QMutexLocker lock(&AudioMutex);
auto type = AudioMsgId::Type::Video;
auto type = audio.type();
auto track = trackForType(type);
t_assert(track != nullptr);
if (track->videoPlayId != videoPlayId) {
if (!track || track->state.id != audio) {
return;
}
@ -797,7 +832,7 @@ void Mixer::resumeFromVideo(uint64 videoPlayId) {
}
}
}
track->state.state = State::Playing;
track->state.state = fast ? State::Playing : State::Resuming;
if (track->isStreamCreated()) {
// When starting the video audio is in paused state and
@ -817,6 +852,9 @@ void Mixer::resumeFromVideo(uint64 videoPlayId) {
alSourcePlay(track->stream.source);
if (!checkCurrentALError(type)) return;
}
if (type == AudioMsgId::Type::Voice) {
emit suppressSong();
}
}
} break;
}
@ -825,104 +863,6 @@ void Mixer::resumeFromVideo(uint64 videoPlayId) {
if (current) emit updated(current);
}
// Forwards a decoded audio part from the video player to the loaders
// object, which performs the actual buffering.
void Mixer::feedFromVideo(VideoSoundPart &&part) {
	_loader->feedFromVideo(std::move(part));
}
// Maps a video frame timestamp onto the playback clock for the given
// video play id. Returns frameMs unchanged unless this play id is the
// currently tracked one and a position update has been recorded.
TimeMs Mixer::getVideoCorrectedTime(uint64 playId, TimeMs frameMs, TimeMs systemMs) {
	auto result = frameMs;
	QMutexLocker videoLock(&_lastVideoMutex);
	if (_lastVideoPlayId == playId && _lastVideoPlaybackWhen > 0) {
		// Start from the last corrected position and extrapolate by the
		// wall-clock time elapsed since that update was stored.
		result = static_cast<TimeMs>(_lastVideoPlaybackCorrectedMs);
		if (systemMs > _lastVideoPlaybackWhen) {
			result += (systemMs - _lastVideoPlaybackWhen);
		}
	}
	return result;
}
// Records the last observed playback position of the video soundtrack
// so that getVideoCorrectedTime() can extrapolate frame timestamps.
// Locks AudioMutex, then _lastVideoMutex — NOTE(review): this order
// must match every other place taking both mutexes; confirm to rule
// out deadlock.
void Mixer::videoSoundProgress(const AudioMsgId &audio) {
	auto type = audio.type();
	t_assert(type == AudioMsgId::Type::Video);
	QMutexLocker lock(&AudioMutex);
	QMutexLocker videoLock(&_lastVideoMutex);
	auto current = trackForType(type);
	t_assert(current != nullptr);
	if (current->videoPlayId == _lastVideoPlayId && current->state.length && current->state.frequency) {
		if (current->state.state == State::Playing) {
			// position is in samples; convert to milliseconds.
			_lastVideoPlaybackWhen = getms();
			_lastVideoPlaybackCorrectedMs = (current->state.position * 1000ULL) / current->state.frequency;
		}
	}
}
// Checks whether an OpenAL playback error happened. Returns true when
// everything is fine; otherwise marks the current track of the given
// type as StoppedAtError, notifies listeners and returns false.
bool Mixer::checkCurrentALError(AudioMsgId::Type type) {
	if (!Audio::PlaybackErrorHappened()) return true;

	auto data = trackForType(type);
	if (data) {
		// Was "if (!data)": the branch dereferenced a null pointer in
		// both calls below and left errors on valid tracks unhandled.
		setStoppedState(data, State::StoppedAtError);
		onError(data->state.id);
	}
	return false;
}
// Toggles play/pause for the current track of the given type.
// With fast == true the state switches immediately; otherwise it goes
// through the fading Resuming / Pausing states handled by the Fader.
// Thread: Any. Locks AudioMutex for the whole operation.
void Mixer::pauseresume(AudioMsgId::Type type, bool fast) {
	QMutexLocker lock(&AudioMutex);
	auto current = trackForType(type);
	// NOTE(review): current is dereferenced without a null check —
	// presumably trackForType() never returns nullptr here; confirm.
	switch (current->state.state) {
	case State::Pausing:
	case State::Paused:
	case State::PausedAtEnd: {
		// Paused -> playing.
		Audio::AttachToDevice();
		if (current->state.state == State::Paused) {
			resetFadeStartPosition(type);
		} else if (current->state.state == State::PausedAtEnd) {
			if (current->isStreamCreated()) {
				// Rewind the source offset back to the logical position.
				alSourcei(current->stream.source, AL_SAMPLE_OFFSET, qMax(current->state.position - current->bufferedPosition, 0LL));
				if (!checkCurrentALError(type)) return;
			}
		}
		current->state.state = fast ? State::Playing : State::Resuming;
		ALint state = AL_INITIAL;
		alGetSourcei(current->stream.source, AL_SOURCE_STATE, &state);
		if (!checkCurrentALError(type)) return;
		if (state != AL_PLAYING) {
			// A stopped source with no connected device cannot resume.
			if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
				return;
			}
			// Restore the gain before starting, since the fader may
			// have left it faded out.
			alSourcef(current->stream.source, AL_GAIN, ComputeVolume(type));
			if (!checkCurrentALError(type)) return;
			alSourcePlay(current->stream.source);
			if (!checkCurrentALError(type)) return;
		}
		// Voice playback asks the fader to duck any playing song.
		if (type == AudioMsgId::Type::Voice) emit suppressSong();
	} break;
	case State::Starting:
	case State::Resuming:
	case State::Playing: {
		// Playing -> pausing (fades out via the fader).
		current->state.state = State::Pausing;
		resetFadeStartPosition(type);
		if (type == AudioMsgId::Type::Voice) emit unsuppressSong();
	} break;
	case State::Finishing: {
		current->state.state = State::Pausing;
	} break;
	}
	emit faderOnTimer();
}
void Mixer::seek(AudioMsgId::Type type, int64 position) {
QMutexLocker lock(&AudioMutex);
@ -957,7 +897,7 @@ void Mixer::seek(AudioMsgId::Type type, int64 position) {
current->state.state = State::Paused;
}
lock.unlock();
return pauseresume(type, true);
return resume(audio, true);
} break;
case State::Starting:
case State::Resuming:
@ -979,16 +919,21 @@ void Mixer::seek(AudioMsgId::Type type, int64 position) {
emit faderOnTimer();
}
void Mixer::stop(AudioMsgId::Type type) {
void Mixer::stop(const AudioMsgId &audio) {
AudioMsgId current;
{
QMutexLocker lock(&AudioMutex);
auto type = audio.type();
auto track = trackForType(type);
t_assert(track != nullptr);
if (!track || track->state.id != audio) {
return;
}
current = track->state.id;
fadedStop(type);
if (type == AudioMsgId::Type::Video) {
if (type == AudioMsgId::Type::Voice) {
emit unsuppressSong();
} else if (type == AudioMsgId::Type::Video) {
track->clear();
}
}
@ -1026,19 +971,9 @@ void Mixer::stopAndClear() {
clearAndCancel(AudioMsgId::Type::Song, index);
}
_videoTrack.clear();
_loader->stopFromVideo();
}
}
TrackState Mixer::currentVideoState(uint64 videoPlayId) {
QMutexLocker lock(&AudioMutex);
auto current = trackForType(AudioMsgId::Type::Video);
if (!current || current->videoPlayId != videoPlayId) {
return TrackState();
}
return current->state;
}
TrackState Mixer::currentState(AudioMsgId::Type type) {
QMutexLocker lock(&AudioMutex);
auto current = trackForType(type);
@ -1106,18 +1041,26 @@ void Mixer::reattachTracks() {
_videoTrack.reattach(AudioMsgId::Type::Video);
}
// Thread: Any. Stores the song volume as fixed-point
// (volume * kVolumeRound) in an atomic for lock-free reads.
void Mixer::setSongVolume(float64 volume) {
	_volumeSong.storeRelease(qRound(volume * kVolumeRound));
}
// Thread: Any. Lock-free read of the value set by setSongVolume().
float64 Mixer::getSongVolume() const {
	return float64(_volumeSong.loadAcquire()) / kVolumeRound;
}
void Mixer::setVideoVolume(float64 volume) {
_videoVolume.storeRelease(qRound(volume * kVideoVolumeRound));
_volumeVideo.storeRelease(qRound(volume * kVolumeRound));
}
float64 Mixer::getVideoVolume() const {
return float64(_videoVolume.loadAcquire()) / kVideoVolumeRound;
return float64(_volumeVideo.loadAcquire()) / kVolumeRound;
}
Fader::Fader(QThread *thread) : QObject()
, _timer(this)
, _suppressAllGain(1., 1.)
, _suppressSongGain(1., 1.) {
, _suppressVolumeAll(1., 1.)
, _suppressVolumeSong(1., 1.) {
moveToThread(thread);
_timer.moveToThread(thread);
connect(thread, SIGNAL(started()), this, SLOT(onInit()));
@ -1134,64 +1077,66 @@ void Fader::onTimer() {
QMutexLocker lock(&AudioMutex);
if (!mixer()) return;
bool suppressAudioChanged = false, suppressSongChanged = false;
auto volumeChangedAll = false;
auto volumeChangedSong = false;
if (_suppressAll || _suppressSongAnim) {
auto ms = getms();
auto wasSong = suppressSongGain;
if (_suppressAll) {
auto wasAudio = suppressAllGain;
if (ms >= _suppressAllEnd || ms < _suppressAllStart) {
_suppressAll = _suppressAllAnim = false;
_suppressAllGain = anim::value(1., 1.);
_suppressVolumeAll = anim::value(1., 1.);
} else if (ms > _suppressAllEnd - kFadeDuration) {
if (_suppressAllGain.to() != 1.) _suppressAllGain.start(1.);
_suppressAllGain.update(1. - ((_suppressAllEnd - ms) / float64(kFadeDuration)), anim::linear);
if (_suppressVolumeAll.to() != 1.) _suppressVolumeAll.start(1.);
_suppressVolumeAll.update(1. - ((_suppressAllEnd - ms) / float64(kFadeDuration)), anim::linear);
} else if (ms >= _suppressAllStart + st::mediaPlayerSuppressDuration) {
if (_suppressAllAnim) {
_suppressAllGain.finish();
_suppressVolumeAll.finish();
_suppressAllAnim = false;
}
} else if (ms > _suppressAllStart) {
_suppressAllGain.update((ms - _suppressAllStart) / float64(st::mediaPlayerSuppressDuration), anim::linear);
_suppressVolumeAll.update((ms - _suppressAllStart) / float64(st::mediaPlayerSuppressDuration), anim::linear);
}
suppressAllGain = _suppressAllGain.current();
suppressAudioChanged = (suppressAllGain != wasAudio);
auto wasVolumeMultiplierAll = VolumeMultiplierAll;
VolumeMultiplierAll = _suppressVolumeAll.current();
volumeChangedAll = (VolumeMultiplierAll != wasVolumeMultiplierAll);
}
if (_suppressSongAnim) {
if (ms >= _suppressSongStart + kFadeDuration) {
_suppressSongGain.finish();
_suppressVolumeSong.finish();
_suppressSongAnim = false;
} else {
_suppressSongGain.update((ms - _suppressSongStart) / float64(kFadeDuration), anim::linear);
_suppressVolumeSong.update((ms - _suppressSongStart) / float64(kFadeDuration), anim::linear);
}
}
suppressSongGain = qMin(suppressAllGain, _suppressSongGain.current());
suppressSongChanged = (suppressSongGain != wasSong);
auto wasVolumeMultiplierSong = VolumeMultiplierSong;
VolumeMultiplierSong = _suppressVolumeSong.current();
accumulate_min(VolumeMultiplierSong, VolumeMultiplierAll);
volumeChangedSong = (VolumeMultiplierSong != wasVolumeMultiplierSong);
}
bool hasFading = (_suppressAll || _suppressSongAnim);
bool hasPlaying = false;
auto hasFading = (_suppressAll || _suppressSongAnim);
auto hasPlaying = false;
auto updatePlayback = [this, &hasPlaying, &hasFading](AudioMsgId::Type type, int index, float64 suppressGain, bool suppressGainChanged) {
auto updatePlayback = [this, &hasPlaying, &hasFading](AudioMsgId::Type type, int index, float64 volumeMultiplier, bool suppressGainChanged) {
auto track = mixer()->trackForType(type, index);
if (IsStopped(track->state.state) || track->state.state == State::Paused || !track->isStreamCreated()) return;
int32 emitSignals = updateOnePlayback(track, hasPlaying, hasFading, suppressGain, suppressGainChanged);
auto emitSignals = updateOnePlayback(track, hasPlaying, hasFading, volumeMultiplier, suppressGainChanged);
if (emitSignals & EmitError) emit error(track->state.id);
if (emitSignals & EmitStopped) emit audioStopped(track->state.id);
if (emitSignals & EmitPositionUpdated) emit playPositionUpdated(track->state.id);
if (emitSignals & EmitNeedToPreload) emit needToPreload(track->state.id);
};
auto suppressGainForMusic = suppressSongGain * Global::SongVolume();
auto suppressGainForMusicChanged = suppressSongChanged || _songVolumeChanged;
auto suppressGainForMusic = ComputeVolume(AudioMsgId::Type::Song);
auto suppressGainForMusicChanged = volumeChangedSong || _volumeChangedSong;
for (auto i = 0; i != kTogetherLimit; ++i) {
updatePlayback(AudioMsgId::Type::Voice, i, suppressAllGain, suppressAudioChanged);
updatePlayback(AudioMsgId::Type::Voice, i, VolumeMultiplierAll, volumeChangedAll);
updatePlayback(AudioMsgId::Type::Song, i, suppressGainForMusic, suppressGainForMusicChanged);
}
auto suppressGainForVideo = suppressSongGain * Global::VideoVolume();
auto suppressGainForVideoChanged = suppressSongChanged || _videoVolumeChanged;
auto suppressGainForVideo = ComputeVolume(AudioMsgId::Type::Video);
auto suppressGainForVideoChanged = volumeChangedAll || _volumeChangedVideo;
updatePlayback(AudioMsgId::Type::Video, 0, suppressGainForVideo, suppressGainForVideoChanged);
_songVolumeChanged = _videoVolumeChanged = false;
_volumeChangedSong = _volumeChangedVideo = false;
if (hasFading) {
_timer.start(kCheckFadingTimeout);
@ -1204,8 +1149,9 @@ void Fader::onTimer() {
}
}
int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 suppressGain, bool suppressGainChanged) {
bool playing = false, fading = false;
int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 volumeMultiplier, bool volumeChanged) {
auto playing = false;
auto fading = false;
auto errorHappened = [this, track] {
if (Audio::PlaybackErrorHappened()) {
@ -1258,7 +1204,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
emitSignals |= EmitStopped;
} else if (TimeMs(1000) * fadingForSamplesCount >= kFadeDuration * track->state.frequency) {
fading = false;
alSourcef(track->stream.source, AL_GAIN, 1. * suppressGain);
alSourcef(track->stream.source, AL_GAIN, 1. * volumeMultiplier);
if (errorHappened()) return EmitError;
switch (track->state.state) {
@ -1286,7 +1232,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
if (track->state.state == State::Pausing || track->state.state == State::Finishing) {
newGain = 1. - newGain;
}
alSourcef(track->stream.source, AL_GAIN, newGain * suppressGain);
alSourcef(track->stream.source, AL_GAIN, newGain * volumeMultiplier);
if (errorHappened()) return EmitError;
}
} else if (playing && (state == AL_PLAYING || !track->loading)) {
@ -1299,8 +1245,8 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
}
setStoppedState(track, State::StoppedAtEnd);
emitSignals |= EmitStopped;
} else if (suppressGainChanged) {
alSourcef(track->stream.source, AL_GAIN, suppressGain);
} else if (volumeChanged) {
alSourcef(track->stream.source, AL_GAIN, 1. * volumeMultiplier);
if (errorHappened()) return EmitError;
}
}
@ -1333,7 +1279,7 @@ void Fader::onSuppressSong() {
_suppressSong = true;
_suppressSongAnim = true;
_suppressSongStart = getms();
_suppressSongGain.start(st::suppressSong);
_suppressVolumeSong.start(kSuppressRatioSong);
onTimer();
}
}
@ -1343,7 +1289,7 @@ void Fader::onUnsuppressSong() {
_suppressSong = false;
_suppressSongAnim = true;
_suppressSongStart = getms();
_suppressSongGain.start(1.);
_suppressVolumeSong.start(1.);
onTimer();
}
}
@ -1355,17 +1301,17 @@ void Fader::onSuppressAll(qint64 duration) {
_suppressAllStart = now;
}
_suppressAllEnd = now + duration;
_suppressAllGain.start(st::suppressAll);
_suppressVolumeAll.start(kSuppressRatioAll);
onTimer();
}
void Fader::onSongVolumeChanged() {
_songVolumeChanged = true;
_volumeChangedSong = true;
onTimer();
}
void Fader::onVideoVolumeChanged() {
_videoVolumeChanged = true;
_volumeChangedVideo = true;
onTimer();
}

View File

@ -114,18 +114,15 @@ public:
Mixer();
void play(const AudioMsgId &audio, int64 position = 0);
void pauseresume(AudioMsgId::Type type, bool fast = false);
void play(const AudioMsgId &audio, std::unique_ptr<VideoSoundData> videoData, int64 position = 0);
void pause(const AudioMsgId &audio, bool fast = false);
void resume(const AudioMsgId &audio, bool fast = false);
void seek(AudioMsgId::Type type, int64 position); // type == AudioMsgId::Type::Song
void stop(AudioMsgId::Type type);
void stop(const AudioMsgId &audio);
// Video player audio stream interface.
void initFromVideo(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data, int64 position);
void feedFromVideo(VideoSoundPart &&part);
int64 getVideoCorrectedTime(uint64 playId, TimeMs frameMs, TimeMs systemMs);
TrackState currentVideoState(uint64 videoPlayId);
void stopFromVideo(uint64 videoPlayId);
void pauseFromVideo(uint64 videoPlayId);
void resumeFromVideo(uint64 videoPlayId);
int64 getVideoCorrectedTime(const AudioMsgId &id, TimeMs frameMs, TimeMs systemMs);
void stopAndClear();
@ -143,6 +140,8 @@ public:
void reattachTracks();
// Thread: Any.
void setSongVolume(float64 volume);
float64 getSongVolume() const;
void setVideoVolume(float64 volume);
float64 getVideoVolume() const;
@ -177,7 +176,9 @@ private:
public:
static constexpr int kBuffersCount = 3;
// Thread: Any. Must be locked: AudioMutex.
void reattach(AudioMsgId::Type type);
void detach();
void clear();
void started();
@ -187,6 +188,8 @@ private:
int getNotQueuedBufferIndex();
~Track();
TrackState state;
FileLocation file;
@ -207,10 +210,11 @@ private:
uint32 buffers[kBuffersCount] = { 0 };
};
Stream stream;
uint64 videoPlayId = 0;
std::unique_ptr<VideoSoundData> videoData;
TimeMs lastUpdateWhen = 0;
TimeMs lastUpdateCorrectedMs = 0;
private:
void createStream();
void destroyStream();
@ -232,13 +236,9 @@ private:
Track _songTracks[kTogetherLimit];
Track _videoTrack;
QAtomicInt _videoVolume;
uint64 _lastVideoPlayId = 0;
TimeMs _lastVideoPlaybackWhen = 0;
TimeMs _lastVideoPlaybackCorrectedMs = 0;
QMutex _lastVideoMutex;
QMutex _mutex;
QAtomicInt _volumeVideo;
QAtomicInt _volumeSong;
friend class Fader;
friend class Loaders;
@ -280,18 +280,20 @@ private:
EmitPositionUpdated = 0x04,
EmitNeedToPreload = 0x08,
};
int32 updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 suppressGain, bool suppressGainChanged);
int32 updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 volumeMultiplier, bool volumeChanged);
void setStoppedState(Mixer::Track *track, State state = State::Stopped);
QTimer _timer;
bool _volumeChangedSong = false;
bool _volumeChangedVideo = false;
bool _suppressAll = false;
bool _suppressAllAnim = false;
bool _suppressSong = false;
bool _suppressSongAnim = false;
bool _songVolumeChanged = false;
bool _videoVolumeChanged = false;
anim::value _suppressAllGain, _suppressSongGain;
anim::value _suppressVolumeAll;
anim::value _suppressVolumeSong;
TimeMs _suppressAllStart = 0;
TimeMs _suppressAllEnd = 0;
TimeMs _suppressSongStart = 0;

View File

@ -20,6 +20,10 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
*/
#pragma once
namespace FFMpeg {
struct AVPacketDataWrap;
} // namespace FFMpeg
class AudioPlayerLoader {
public:
AudioPlayerLoader(const FileLocation &file, const QByteArray &data, base::byte_vector &&bytes);
@ -40,6 +44,9 @@ public:
EndOfFile,
};
virtual ReadResult readMore(QByteArray &samples, int64 &samplesCount) = 0;
virtual void enqueuePackets(QQueue<FFMpeg::AVPacketDataWrap> &packets) {
Unexpected("enqueuePackets() call on not ChildFFMpegLoader.");
}
void saveDecodedSamples(QByteArray *samples, int64 *samplesCount);
void takeSavedDecodedSamples(QByteArray *samples, int64 *samplesCount);

View File

@ -35,38 +35,43 @@ Loaders::Loaders(QThread *thread) : _fromVideoNotify([this] { videoSoundAdded();
}
void Loaders::feedFromVideo(VideoSoundPart &&part) {
bool invoke = false;
auto invoke = false;
{
QMutexLocker lock(&_fromVideoMutex);
if (_fromVideoPlayId == part.videoPlayId) {
_fromVideoQueue.enqueue(FFMpeg::dataWrapFromPacket(*part.packet));
invoke = true;
} else {
FFMpeg::freePacket(part.packet);
}
_fromVideoQueues[part.audio].enqueue(FFMpeg::dataWrapFromPacket(*part.packet));
invoke = true;
}
if (invoke) {
_fromVideoNotify.call();
}
}
void Loaders::startFromVideo(uint64 videoPlayId) {
QMutexLocker lock(&_fromVideoMutex);
_fromVideoPlayId = videoPlayId;
clearFromVideoQueue();
}
void Loaders::stopFromVideo() {
startFromVideo(0);
}
void Loaders::videoSoundAdded() {
bool waitingAndAdded = false;
auto waitingAndAdded = false;
auto queues = decltype(_fromVideoQueues)();
{
QMutexLocker lock(&_fromVideoMutex);
if (_videoLoader && _videoLoader->playId() == _fromVideoPlayId && !_fromVideoQueue.isEmpty()) {
_videoLoader->enqueuePackets(_fromVideoQueue);
waitingAndAdded = _videoLoader->holdsSavedDecodedSamples();
queues = base::take(_fromVideoQueues);
}
auto tryLoader = [this](auto &audio, auto &loader, auto &it) {
if (audio == it.key() && loader) {
loader->enqueuePackets(it.value());
if (loader->holdsSavedDecodedSamples()) {
onLoad(audio);
}
return true;
}
return false;
};
for (auto i = queues.begin(), e = queues.end(); i != e; ++i) {
if (!tryLoader(_audio, _audioLoader, i)
&& !tryLoader(_song, _songLoader, i)
&& !tryLoader(_video, _videoLoader, i)) {
for (auto &packetData : i.value()) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}
}
if (waitingAndAdded) {
@ -80,11 +85,13 @@ Loaders::~Loaders() {
}
void Loaders::clearFromVideoQueue() {
auto queue = base::take(_fromVideoQueue);
for (auto &packetData : queue) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
auto queues = base::take(_fromVideoQueues);
for (auto &queue : queues) {
for (auto &packetData : queue) {
AVPacket packet;
FFMpeg::packetFromDataWrap(packet, packetData);
FFMpeg::freePacket(&packet);
}
}
}
@ -316,22 +323,22 @@ AudioPlayerLoader *Loaders::setupLoader(const AudioMsgId &audio, SetupError &err
switch (audio.type()) {
case AudioMsgId::Type::Voice: _audio = audio; loader = &_audioLoader; break;
case AudioMsgId::Type::Song: _song = audio; loader = &_songLoader; break;
case AudioMsgId::Type::Video: _video = audio; break;
case AudioMsgId::Type::Video: _video = audio; loader = &_videoLoader; break;
}
if (audio.type() == AudioMsgId::Type::Video) {
if (audio.playId()) {
if (!track->videoData) {
clear(audio.type());
track->state.state = State::StoppedAtError;
emit error(audio);
LOG(("Audio Error: video sound data not ready"));
return nullptr;
}
_videoLoader = std::make_unique<ChildFFMpegLoader>(track->videoPlayId, std::move(track->videoData));
l = _videoLoader.get();
*loader = std::make_unique<ChildFFMpegLoader>(std::move(track->videoData));
} else {
*loader = std::make_unique<FFMpegLoader>(track->file, track->data, base::byte_vector());
l = loader->get();
}
l = loader->get();
if (!l->open(position)) {
track->state.state = State::StoppedAtStart;

View File

@ -35,8 +35,6 @@ class Loaders : public QObject {
public:
Loaders(QThread *thread);
void startFromVideo(uint64 videoPlayId);
void stopFromVideo();
void feedFromVideo(VideoSoundPart &&part);
~Loaders();
@ -58,11 +56,10 @@ private:
AudioMsgId _audio, _song, _video;
std::unique_ptr<AudioPlayerLoader> _audioLoader;
std::unique_ptr<AudioPlayerLoader> _songLoader;
std::unique_ptr<ChildFFMpegLoader> _videoLoader;
std::unique_ptr<AudioPlayerLoader> _videoLoader;
QMutex _fromVideoMutex;
uint64 _fromVideoPlayId;
QQueue<FFMpeg::AVPacketDataWrap> _fromVideoQueue;
QMap<AudioMsgId, QQueue<FFMpeg::AVPacketDataWrap>> _fromVideoQueues;
SingleQueuedInvokation _fromVideoNotify;
void emitError(AudioMsgId::Type type);

View File

@ -32,8 +32,7 @@ VideoSoundData::~VideoSoundData() {
}
}
ChildFFMpegLoader::ChildFFMpegLoader(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray(), base::byte_vector())
, _videoPlayId(videoPlayId)
ChildFFMpegLoader::ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray(), base::byte_vector())
, _parentData(std::move(data)) {
_frame = av_frame_alloc();
}

View File

@ -35,13 +35,14 @@ extern "C" {
struct VideoSoundData {
AVCodecContext *context = nullptr;
int32 frequency = Media::Player::kDefaultFrequency;
TimeMs length = 0;
int64 length = 0;
~VideoSoundData();
};
struct VideoSoundPart {
AVPacket *packet = nullptr;
uint64 videoPlayId = 0;
AudioMsgId audio;
uint32 playId = 0;
};
namespace FFMpeg {
@ -82,7 +83,7 @@ inline void freePacket(AVPacket *packet) {
class ChildFFMpegLoader : public AudioPlayerLoader {
public:
ChildFFMpegLoader(uint64 videoPlayId, std::unique_ptr<VideoSoundData> &&data);
ChildFFMpegLoader(std::unique_ptr<VideoSoundData> &&data);
bool open(qint64 &position) override;
@ -103,11 +104,8 @@ public:
}
ReadResult readMore(QByteArray &result, int64 &samplesAdded) override;
void enqueuePackets(QQueue<FFMpeg::AVPacketDataWrap> &packets);
void enqueuePackets(QQueue<FFMpeg::AVPacketDataWrap> &packets) override;
uint64 playId() const {
return _videoPlayId;
}
bool eofReached() const {
return _eofReached;
}
@ -126,7 +124,6 @@ private:
int32 _maxResampleSamples = 1024;
uint8_t **_dstSamplesData = nullptr;
uint64 _videoPlayId = 0;
std::unique_ptr<VideoSoundData> _parentData;
AVSampleFormat _inputFormat;
AVFrame *_frame = nullptr;

View File

@ -56,8 +56,8 @@ bool isAlignedImage(const QImage &image) {
} // namespace
FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId) : ReaderImplementation(location, data)
, _playId(playId) {
FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data, const AudioMsgId &audio) : ReaderImplementation(location, data)
, _audioMsgId(audio) {
_frame = av_frame_alloc();
av_init_packet(&_packetNull);
_packetNull.data = nullptr;
@ -186,7 +186,7 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(Time
}
// sync by audio stream
auto correctMs = (frameMs >= 0) ? Player::mixer()->getVideoCorrectedTime(_playId, frameMs, systemMs) : frameMs;
auto correctMs = (frameMs >= 0) ? Player::mixer()->getVideoCorrectedTime(_audioMsgId, frameMs, systemMs) : frameMs;
if (!_frameRead) {
auto readResult = readNextFrame();
if (readResult != ReadResult::Success) {
@ -220,18 +220,18 @@ TimeMs FFMpegReaderImplementation::durationMs() const {
void FFMpegReaderImplementation::pauseAudio() {
if (_audioStreamId >= 0) {
Player::mixer()->pauseFromVideo(_playId);
Player::mixer()->pause(_audioMsgId, true);
}
}
void FFMpegReaderImplementation::resumeAudio() {
if (_audioStreamId >= 0) {
Player::mixer()->resumeFromVideo(_playId);
Player::mixer()->resume(_audioMsgId, true);
}
}
bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
t_assert(_frameRead);
Expects(_frameRead);
_frameRead = false;
if (!_width || !_height) {
@ -371,7 +371,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
if (_mode == Mode::Inspecting) {
_hasAudioStream = (_audioStreamId >= 0);
_audioStreamId = -1;
} else if (_mode == Mode::Silent || !_playId) {
} else if (_mode == Mode::Silent || !_audioMsgId.playId()) {
_audioStreamId = -1;
}
@ -427,7 +427,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
if (_audioStreamId >= 0) {
auto position = (positionMs * soundData->frequency) / 1000LL;
Player::mixer()->initFromVideo(_playId, std::move(soundData), position);
Player::mixer()->play(_audioMsgId, std::move(soundData), position);
}
if (readResult == PacketResult::Ok) {
@ -481,7 +481,7 @@ QString FFMpegReaderImplementation::logData() const {
FFMpegReaderImplementation::~FFMpegReaderImplementation() {
if (_audioStreamId >= 0) {
Player::mixer()->stopFromVideo(_playId);
Player::mixer()->stop(_audioMsgId);
}
clearPacketQueue();
@ -517,7 +517,7 @@ FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(
// queue terminating packet to audio player
VideoSoundPart part;
part.packet = &_packetNull;
part.videoPlayId = _playId;
part.audio = _audioMsgId;
Player::mixer()->feedFromVideo(std::move(part));
}
return PacketResult::EndOfFile;
@ -543,7 +543,7 @@ void FFMpegReaderImplementation::processPacket(AVPacket *packet) {
// queue packet to audio player
VideoSoundPart part;
part.packet = packet;
part.videoPlayId = _playId;
part.audio = _audioMsgId;
Player::mixer()->feedFromVideo(std::move(part));
}
} else {

View File

@ -35,7 +35,7 @@ namespace internal {
class FFMpegReaderImplementation : public ReaderImplementation {
public:
FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId);
FFMpegReaderImplementation(FileLocation *location, QByteArray *data, const AudioMsgId &audio);
ReadResult readFramesTill(TimeMs frameMs, TimeMs systemMs) override;
@ -110,7 +110,7 @@ private:
bool _hasAudioStream = false;
int _audioStreamId = -1;
uint64 _playId = 0;
AudioMsgId _audioMsgId;
TimeMs _lastReadVideoMs = 0;
TimeMs _lastReadAudioMs = 0;

View File

@ -91,11 +91,22 @@ QPixmap PrepareFrame(const FrameRequest &request, const QImage &original, bool h
} // namespace
Reader::Reader(const FileLocation &location, const QByteArray &data, Callback &&callback, Mode mode, int64 seekMs)
Reader::Reader(const QString &filepath, Callback &&callback, Mode mode, int64 seekMs)
: _callback(std::move(callback))
, _mode(mode)
, _playId(rand_value<uint64>())
, _seekPositionMs(seekMs) {
init(FileLocation(filepath), QByteArray());
}
Reader::Reader(gsl::not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode, int64 seekMs)
: _callback(std::move(callback))
, _mode(mode)
, _audioMsgId(document, msgId, (mode == Mode::Video) ? rand_value<uint32>() : 0)
, _seekPositionMs(seekMs) {
init(document->location(), document->data());
}
void Reader::init(const FileLocation &location, const QByteArray &data) {
if (threads.size() < ClipThreadsCount) {
_threadIndex = threads.size();
threads.push_back(new QThread());
@ -338,7 +349,7 @@ class ReaderPrivate {
public:
ReaderPrivate(Reader *reader, const FileLocation &location, const QByteArray &data) : _interface(reader)
, _mode(reader->mode())
, _playId(reader->playId())
, _audioMsgId(reader->audioMsgId())
, _seekPositionMs(reader->seekPositionMs())
, _data(data) {
if (_data.isEmpty()) {
@ -361,9 +372,8 @@ public:
// If seek was done to the end: try to read the first frame,
// get the frame size and return a black frame with that size.
auto firstFramePlayId = 0LL;
auto firstFramePositionMs = 0LL;
auto reader = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, firstFramePlayId);
auto firstFramePositionMs = TimeMs(0);
auto reader = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, AudioMsgId());
if (reader->start(internal::ReaderImplementation::Mode::Normal, firstFramePositionMs)) {
auto firstFrameReadResult = reader->readFramesTill(-1, ms);
if (firstFrameReadResult == internal::ReaderImplementation::ReadResult::Success) {
@ -470,7 +480,7 @@ public:
}
}
_implementation = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, _playId);
_implementation = std::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, _audioMsgId);
// _implementation = new QtGifReaderImplementation(_location, &_data);
auto implementationMode = [this]() {
@ -532,7 +542,7 @@ private:
Reader *_interface;
State _state = State::Reading;
Reader::Mode _mode;
uint64 _playId;
AudioMsgId _audioMsgId;
TimeMs _seekPositionMs = 0;
QByteArray _data;
@ -844,9 +854,8 @@ FileLoadTask::Video PrepareForSending(const QString &fname, const QByteArray &da
auto localLocation = FileLocation(fname);
auto localData = QByteArray(data);
auto playId = 0ULL;
auto seekPositionMs = 0LL;
auto reader = std::make_unique<internal::FFMpegReaderImplementation>(&localLocation, &localData, playId);
auto reader = std::make_unique<internal::FFMpegReaderImplementation>(&localLocation, &localData, AudioMsgId());
if (reader->start(internal::ReaderImplementation::Mode::Inspecting, seekPositionMs)) {
auto durationMs = reader->durationMs();
if (durationMs > 0) {

View File

@ -61,7 +61,9 @@ public:
Video,
};
Reader(const FileLocation &location, const QByteArray &data, Callback &&callback, Mode mode = Mode::Gif, TimeMs seekMs = 0);
Reader(const QString &filepath, Callback &&callback, Mode mode = Mode::Gif, TimeMs seekMs = 0);
Reader(gsl::not_null<DocumentData*> document, FullMsgId msgId, Callback &&callback, Mode mode = Mode::Gif, TimeMs seekMs = 0);
static void callback(Reader *reader, int threadIndex, Notification notification); // reader can be deleted
void setAutoplay() {
@ -71,8 +73,8 @@ public:
return _autoplay;
}
uint64 playId() const {
return _playId;
AudioMsgId audioMsgId() const {
return _audioMsgId;
}
TimeMs seekPositionMs() const {
return _seekPositionMs;
@ -126,13 +128,14 @@ public:
~Reader();
private:
void init(const FileLocation &location, const QByteArray &data);
Callback _callback;
Mode _mode;
State _state = State::Reading;
uint64 _playId;
AudioMsgId _audioMsgId;
bool _hasAudio = false;
TimeMs _durationMs = 0;
TimeMs _seekPositionMs = 0;
@ -157,9 +160,9 @@ private:
TimeMs positionMs = 0;
};
mutable Frame _frames[3];
Frame *frameToShow(int *index = 0) const; // 0 means not ready
Frame *frameToWrite(int *index = 0) const; // 0 means not ready
Frame *frameToWriteNext(bool check, int *index = 0) const;
Frame *frameToShow(int *index = nullptr) const; // 0 means not ready
Frame *frameToWrite(int *index = nullptr) const; // 0 means not ready
Frame *frameToWriteNext(bool check, int *index = nullptr) const;
void moveToNextShow() const;
void moveToNextWrite() const;

View File

@ -113,6 +113,7 @@ CoverWidget::CoverWidget(QWidget *parent) : TWidget(parent)
updateVolumeToggleIcon();
_volumeToggle->setClickedCallback([this]() {
Global::SetSongVolume((Global::SongVolume() > 0) ? 0. : Global::RememberedSongVolume());
mixer()->setSongVolume(Global::SongVolume());
Global::RefSongVolumeChanged().notify();
});
subscribe(Global::RefSongVolumeChanged(), [this] { updateVolumeToggleIcon(); });

View File

@ -176,8 +176,8 @@ void Instance::play() {
if (state.id) {
if (IsStopped(state.state)) {
mixer()->play(state.id);
} else if (IsPaused(state.state) || state.state == State::Pausing) {
mixer()->pauseresume(AudioMsgId::Type::Song);
} else {
mixer()->resume(state.id);
}
} else if (_current) {
mixer()->play(_current);
@ -198,16 +198,15 @@ void Instance::play(const AudioMsgId &audioId) {
void Instance::pause(AudioMsgId::Type type) {
auto state = mixer()->currentState(type);
if (state.id) {
if (!IsStopped(state.state)) {
if (state.state == State::Starting || state.state == State::Resuming || state.state == State::Playing || state.state == State::Finishing) {
mixer()->pauseresume(type);
}
}
mixer()->pause(state.id);
}
}
void Instance::stop() {
mixer()->stop(AudioMsgId::Type::Song);
auto state = mixer()->currentState(AudioMsgId::Type::Song);
if (state.id) {
mixer()->stop(state.id);
}
}
void Instance::playPause() {
@ -215,8 +214,10 @@ void Instance::playPause() {
if (state.id) {
if (IsStopped(state.state)) {
mixer()->play(state.id);
} else if (IsPaused(state.state) || state.state == State::Pausing) {
mixer()->resume(state.id);
} else {
mixer()->pauseresume(AudioMsgId::Type::Song);
mixer()->pause(state.id);
}
} else if (_current) {
mixer()->play(_current);

View File

@ -74,6 +74,7 @@ void VolumeController::setVolume(float64 volume) {
void VolumeController::applyVolumeChange(float64 volume) {
if (volume != Global::SongVolume()) {
Global::SetSongVolume(volume);
mixer()->setSongVolume(Global::SongVolume());
Global::RefSongVolumeChanged().notify();
}
}

View File

@ -121,6 +121,7 @@ Widget::Widget(QWidget *parent) : TWidget(parent)
updateVolumeToggleIcon();
_volumeToggle->setClickedCallback([this] {
Global::SetSongVolume((Global::SongVolume() > 0) ? 0. : Global::RememberedSongVolume());
mixer()->setSongVolume(Global::SongVolume());
Global::RefSongVolumeChanged().notify();
});
subscribe(Global::RefSongVolumeChanged(), [this] { updateVolumeToggleIcon(); });

View File

@ -1257,7 +1257,7 @@ void MediaView::displayDocument(DocumentData *doc, HistoryItem *item) { // empty
} else if (_doc->isTheme()) {
initThemePreview();
} else {
const FileLocation &location(_doc->location(true));
auto &location = _doc->location(true);
if (location.accessEnable()) {
if (QImageReader(location.name()).canRead()) {
_current = App::pixmapFromImageInPlace(App::readImage(location.name(), 0, false));
@ -1441,7 +1441,7 @@ void MediaView::createClipReader() {
_current = _doc->thumb->pixNoCache(_doc->thumb->width(), _doc->thumb->height(), videoThumbOptions(), st::mediaviewFileIconSize, st::mediaviewFileIconSize);
}
auto mode = (_doc->isVideo() || _doc->isRoundVideo()) ? Media::Clip::Reader::Mode::Video : Media::Clip::Reader::Mode::Gif;
_gif = std::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), [this](Media::Clip::Notification notification) {
_gif = Media::Clip::MakeReader(_doc, FullMsgId(_channel, _msgid), [this](Media::Clip::Notification notification) {
clipCallback(notification);
}, mode);
@ -1557,7 +1557,7 @@ void MediaView::restartVideoAtSeekPosition(TimeMs positionMs) {
auto rounding = (_doc && _doc->isRoundVideo()) ? ImageRoundRadius::Ellipse : ImageRoundRadius::None;
_current = _gif->current(_gif->width() / cIntRetinaFactor(), _gif->height() / cIntRetinaFactor(), _gif->width() / cIntRetinaFactor(), _gif->height() / cIntRetinaFactor(), rounding, ImageRoundCorner::All, getms());
}
_gif = std::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), [this](Media::Clip::Notification notification) {
_gif = Media::Clip::MakeReader(_doc, FullMsgId(_channel, _msgid), [this](Media::Clip::Notification notification) {
clipCallback(notification);
}, Media::Clip::Reader::Mode::Video, positionMs);
@ -1607,16 +1607,17 @@ void MediaView::onVideoToggleFullScreen() {
}
void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
if (audioId.type() != AudioMsgId::Type::Video || !_gif) {
if (!_gif || _gif->audioMsgId() != audioId) {
return;
}
auto state = Media::Player::mixer()->currentVideoState(_gif->playId());
if (state.length) {
updateVideoPlaybackState(state);
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Video);
if (state.id == _gif->audioMsgId()) {
if (state.length) {
updateVideoPlaybackState(state);
}
AuthSession::Current().data().setLastTimeVideoPlayedAt(getms(true));
}
AuthSession::Current().data().setLastTimeVideoPlayedAt(getms(true));
}
void MediaView::updateVideoPlaybackState(const Media::Player::TrackState &state) {

View File

@ -258,7 +258,7 @@ private:
bool _pressed = false;
int32 _dragging = 0;
QPixmap _current;
std::unique_ptr<Media::Clip::Reader> _gif;
Media::Clip::ReaderPointer _gif;
int32 _full = -1; // -1 - thumb, 0 - medium, 1 - full
// Video without audio stream playback information.

View File

@ -1203,7 +1203,11 @@ void DocumentOpenClickHandler::doOpen(DocumentData *data, HistoryItem *context,
if (playVoice) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice);
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else {
auto audio = AudioMsgId(data, msgId);
Media::Player::mixer()->play(audio);
@ -1215,7 +1219,11 @@ void DocumentOpenClickHandler::doOpen(DocumentData *data, HistoryItem *context,
} else if (playMusic) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(data, msgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song);
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else {
auto song = AudioMsgId(data, msgId);
Media::Player::mixer()->play(song);
@ -1503,7 +1511,11 @@ void DocumentData::performActionOnLoad() {
if (loaded()) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Voice);
if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Voice);
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else if (Media::Player::IsStopped(state.state)) {
Media::Player::mixer()->play(AudioMsgId(this, _actionOnLoadMsgId));
if (App::main()) App::main()->mediaMarkRead(this);
@ -1513,7 +1525,11 @@ void DocumentData::performActionOnLoad() {
if (loaded()) {
auto state = Media::Player::mixer()->currentState(AudioMsgId::Type::Song);
if (state.id == AudioMsgId(this, _actionOnLoadMsgId) && !Media::Player::IsStopped(state.state) && state.state != State::Finishing) {
Media::Player::mixer()->pauseresume(AudioMsgId::Type::Song);
if (Media::Player::IsPaused(state.state) || state.state == State::Pausing) {
Media::Player::mixer()->resume(state.id);
} else {
Media::Player::mixer()->pause(state.id);
}
} else if (Media::Player::IsStopped(state.state)) {
auto song = AudioMsgId(this, _actionOnLoadMsgId);
Media::Player::mixer()->play(song);

View File

@ -1337,16 +1337,10 @@ public:
Video,
};
AudioMsgId() {
}
AudioMsgId(DocumentData *audio, const FullMsgId &msgId) : _audio(audio), _contextId(msgId) {
AudioMsgId() = default;
AudioMsgId(DocumentData *audio, const FullMsgId &msgId, uint32 playId = 0) : _audio(audio), _contextId(msgId), _playId(playId) {
setTypeFromAudio();
}
AudioMsgId(DocumentData *audio, ChannelId channelId, MsgId msgId) : _audio(audio), _contextId(channelId, msgId) {
setTypeFromAudio();
}
AudioMsgId(Type type) : _type(type) {
}
Type type() const {
return _type;
@ -1357,14 +1351,17 @@ public:
FullMsgId contextId() const {
return _contextId;
}
uint32 playId() const {
return _playId;
}
explicit operator bool() const {
return _audio || (_type == Type::Video);
return _audio != nullptr;
}
private:
void setTypeFromAudio() {
if (_audio->voice()) {
if (_audio->voice() || _audio->isRoundVideo()) {
_type = Type::Voice;
} else if (_audio->isVideo()) {
_type = Type::Video;
@ -1378,14 +1375,24 @@ private:
DocumentData *_audio = nullptr;
Type _type = Type::Unknown;
FullMsgId _contextId;
uint32 _playId = 0;
};
inline bool operator<(const AudioMsgId &a, const AudioMsgId &b) {
return quintptr(a.audio()) < quintptr(b.audio()) || (quintptr(a.audio()) == quintptr(b.audio()) && a.contextId() < b.contextId());
if (quintptr(a.audio()) < quintptr(b.audio())) {
return true;
} else if (quintptr(b.audio()) < quintptr(a.audio())) {
return false;
} else if (a.contextId() < b.contextId()) {
return true;
} else if (b.contextId() < a.contextId()) {
return false;
}
return (a.playId() < b.playId());
}
inline bool operator==(const AudioMsgId &a, const AudioMsgId &b) {
return a.audio() == b.audio() && a.contextId() == b.contextId();
return a.audio() == b.audio() && a.contextId() == b.contextId() && a.playId() == b.playId();
}
inline bool operator!=(const AudioMsgId &a, const AudioMsgId &b) {
return !(a == b);