diff --git a/Telegram/SourceFiles/mainwidget.cpp b/Telegram/SourceFiles/mainwidget.cpp index 5f625b8c0..8c44c5c16 100644 --- a/Telegram/SourceFiles/mainwidget.cpp +++ b/Telegram/SourceFiles/mainwidget.cpp @@ -1530,6 +1530,7 @@ void MainWidget::ui_autoplayMediaInlineAsync(qint32 channelId, qint32 msgId) { void MainWidget::audioPlayProgress(const AudioMsgId &audioId) { if (audioId.type() == AudioMsgId::Type::Video) { + audioPlayer()->videoSoundProgress(audioId); return; } diff --git a/Telegram/SourceFiles/media/media_audio.cpp b/Telegram/SourceFiles/media/media_audio.cpp index 85997ee1b..9bc7791c2 100644 --- a/Telegram/SourceFiles/media/media_audio.cpp +++ b/Telegram/SourceFiles/media/media_audio.cpp @@ -482,7 +482,7 @@ void AudioPlayer::play(const AudioMsgId &audio, int64 position) { if (stopped) emit updated(stopped); } -void AudioPlayer::playFromVideo(const AudioMsgId &audio, int64 position, std_::unique_ptr<VideoSoundData> &&data) { +void AudioPlayer::playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position) { t_assert(audio.type() == AudioMsgId::Type::Video); auto type = audio.type(); @@ -502,8 +502,15 @@ void AudioPlayer::playFromVideo(const AudioMsgId &audio, int64 position, std_::unique_ptr<VideoSoundData> &&data) { emit faderOnTimer(); current->clear(); current->audio = audio; + current->videoPlayId = videoPlayId; current->videoData = std_::move(data); - _loader->startFromVideo(current->videoData->videoPlayId); + { + QMutexLocker videoLock(&_lastVideoMutex); + _lastVideoPlayId = current->videoPlayId; + _lastVideoPlaybackWhen = 0; + _lastVideoPlaybackCorrectedMs = 0; + } + _loader->startFromVideo(current->videoPlayId); current->playbackState.state = AudioPlayerPlaying; current->loading = true; @@ -516,6 +523,36 @@ void AudioPlayer::feedFromVideo(VideoSoundPart &&part) { _loader->feedFromVideo(std_::move(part)); } +int64 AudioPlayer::getVideoCorrectedTime(uint64 playId, uint64 systemMs) { + int64 result = systemMs; + + QMutexLocker videoLock(&_lastVideoMutex); + if 
(_lastVideoPlayId == playId && _lastVideoPlaybackWhen > 0) { + result = static_cast<int64>(_lastVideoPlaybackCorrectedMs); + if (systemMs > _lastVideoPlaybackWhen) { + result += (systemMs - _lastVideoPlaybackWhen); + } + } + + return result; +} + +void AudioPlayer::videoSoundProgress(const AudioMsgId &audio) { + auto type = audio.type(); + t_assert(type == AudioMsgId::Type::Video); + + QMutexLocker lock(&playerMutex); + QMutexLocker videoLock(&_lastVideoMutex); + + auto current = dataForType(type); + t_assert(current != nullptr); + + if (current->videoPlayId == _lastVideoPlayId && current->playbackState.frequency) { + _lastVideoPlaybackWhen = getms(); + _lastVideoPlaybackCorrectedMs = (current->playbackState.position * 1000ULL) / current->playbackState.frequency; + } +} + bool AudioPlayer::checkCurrentALError(AudioMsgId::Type type) { if (_checkALError()) return true; diff --git a/Telegram/SourceFiles/media/media_audio.h b/Telegram/SourceFiles/media/media_audio.h index 3dc9fc96a..38b251a88 100644 --- a/Telegram/SourceFiles/media/media_audio.h +++ b/Telegram/SourceFiles/media/media_audio.h @@ -67,9 +67,10 @@ public: void stop(AudioMsgId::Type type); // Video player audio stream interface. 
- void playFromVideo(const AudioMsgId &audio, int64 position, std_::unique_ptr<VideoSoundData> &&data); + void playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position); void feedFromVideo(VideoSoundPart &&part); - AudioPlaybackState getStateForVideo(uint64 playId); + int64 getVideoCorrectedTime(uint64 playId, uint64 systemMs); + void videoSoundProgress(const AudioMsgId &audio); void stopAndClear(); @@ -122,6 +123,7 @@ private: uint32 buffers[3] = { 0 }; int64 samplesCount[3] = { 0 }; + uint64 videoPlayId = 0; std_::unique_ptr<VideoSoundData> videoData; private: @@ -147,8 +149,10 @@ private: AudioMsg _songData[AudioSimultaneousLimit]; AudioMsg _videoData; - uint64 _lastVideoPlayId; - AudioPlaybackState _lastVideoPlaybackState; + uint64 _lastVideoPlayId = 0; + uint64 _lastVideoPlaybackWhen = 0; + uint64 _lastVideoPlaybackCorrectedMs = 0; + QMutex _lastVideoMutex; QMutex _mutex; diff --git a/Telegram/SourceFiles/media/media_audio_loaders.cpp b/Telegram/SourceFiles/media/media_audio_loaders.cpp index 581342300..a45558a8d 100644 --- a/Telegram/SourceFiles/media/media_audio_loaders.cpp +++ b/Telegram/SourceFiles/media/media_audio_loaders.cpp @@ -361,7 +361,7 @@ AudioPlayerLoader *AudioPlayerLoaders::setupLoader(const AudioMsgId &audio, Setu LOG(("Audio Error: video sound data not ready")); return nullptr; } - _videoLoader = std_::make_unique<ChildFFMpegLoader>(std_::move(data->videoData)); + _videoLoader = std_::make_unique<ChildFFMpegLoader>(data->videoPlayId, std_::move(data->videoData)); l = _videoLoader.get(); } else { *loader = std_::make_unique<FFMpegLoader>(data->file, data->data); diff --git a/Telegram/SourceFiles/media/media_child_ffmpeg_loader.cpp b/Telegram/SourceFiles/media/media_child_ffmpeg_loader.cpp index f7688934d..c6534720b 100644 --- a/Telegram/SourceFiles/media/media_child_ffmpeg_loader.cpp +++ b/Telegram/SourceFiles/media/media_child_ffmpeg_loader.cpp @@ -33,7 +33,8 @@ VideoSoundData::~VideoSoundData() { } } -ChildFFMpegLoader::ChildFFMpegLoader(std_::unique_ptr<VideoSoundData> &&data) : 
AudioPlayerLoader(FileLocation(), QByteArray()) +ChildFFMpegLoader::ChildFFMpegLoader(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray()) +, _videoPlayId(videoPlayId) , _parentData(std_::move(data)) { _frame = av_frame_alloc(); } diff --git a/Telegram/SourceFiles/media/media_child_ffmpeg_loader.h b/Telegram/SourceFiles/media/media_child_ffmpeg_loader.h index f5873472d..903893c1d 100644 --- a/Telegram/SourceFiles/media/media_child_ffmpeg_loader.h +++ b/Telegram/SourceFiles/media/media_child_ffmpeg_loader.h @@ -32,7 +32,6 @@ extern "C" { #include <AL/al.h> struct VideoSoundData { - uint64 videoPlayId = 0; AVCodecContext *context = nullptr; int32 frequency = AudioVoiceMsgFrequency; int64 length = 0; @@ -64,7 +63,7 @@ inline void freePacket(AVPacket *packet) { class ChildFFMpegLoader : public AudioPlayerLoader { public: - ChildFFMpegLoader(std_::unique_ptr<VideoSoundData> &&data); + ChildFFMpegLoader(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data); bool open(qint64 position = 0) override; @@ -88,7 +87,7 @@ public: void enqueuePackets(QQueue<AVPacket> &packets); uint64 playId() const { - return _parentData->videoPlayId; + return _videoPlayId; } bool eofReached() const { return _eofReached; @@ -106,6 +105,7 @@ private: int32 _maxResampleSamples = 1024; uint8_t **_dstSamplesData = nullptr; + uint64 _videoPlayId = 0; std_::unique_ptr<VideoSoundData> _parentData; AVSampleFormat _inputFormat; AVFrame *_frame = nullptr; diff --git a/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp b/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp index 55e2cd1e2..31903856a 100644 --- a/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp +++ b/Telegram/SourceFiles/media/media_clip_ffmpeg.cpp @@ -103,6 +103,7 @@ bool FFMpegReaderImplementation::readNextFrame() { _frameMs = frameMs; _hadFrame = _frameRead = true; + _frameTime += _currentFrameDelay; return true; } @@ -133,6 +134,44 @@ bool FFMpegReaderImplementation::readNextFrame() { return false; } +bool FFMpegReaderImplementation::readFramesTill(int64 ms) { 
+ if (_audioStreamId >= 0) { // sync by audio stream + auto correctMs = audioPlayer()->getVideoCorrectedTime(_playId, ms); + + if (!_frameRead && !readNextFrame()) { + return false; + } + while (_frameTime <= correctMs) { + if (!readNextFrame()) { + return false; + } + } + _frameTimeCorrection = ms - correctMs; + return true; + } else { // just keep up + if (_frameRead && _frameTime > ms) { + return true; + } + if (!readNextFrame()) { + return false; + } + if (_frameTime > ms) { + return true; + } + if (!readNextFrame()) { + return false; + } + if (_frameTime <= ms) { + _frameTime = ms + 5; // keep up + } + return true; + } +} + +uint64 FFMpegReaderImplementation::framePresentationTime() const { + return static_cast<uint64>(qMax(_frameTime + _frameTimeCorrection, 0LL)); +} + bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) { t_assert(_frameRead); _frameRead = false; @@ -184,10 +223,6 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q return true; } -int FFMpegReaderImplementation::nextFrameDelay() { return _currentFrameDelay; } bool FFMpegReaderImplementation::start(Mode mode) { _mode = mode; @@ -276,8 +311,8 @@ bool FFMpegReaderImplementation::start(Mode mode) { } else { soundData->length = (_fmtContext->streams[_audioStreamId]->duration * soundData->frequency * _fmtContext->streams[_audioStreamId]->time_base.num) / _fmtContext->streams[_audioStreamId]->time_base.den; } - soundData->videoPlayId = _playId = rand_value<uint64>(); - audioPlayer()->playFromVideo(AudioMsgId(AudioMsgId::Type::Video), 0, std_::move(soundData)); + _playId = rand_value<uint64>(); + audioPlayer()->playFromVideo(AudioMsgId(AudioMsgId::Type::Video), _playId, std_::move(soundData), 0); } return true; diff --git a/Telegram/SourceFiles/media/media_clip_ffmpeg.h b/Telegram/SourceFiles/media/media_clip_ffmpeg.h index 86f54c506..0bd5d12a6 100644 --- a/Telegram/SourceFiles/media/media_clip_ffmpeg.h +++ 
b/Telegram/SourceFiles/media/media_clip_ffmpeg.h @@ -37,9 +37,9 @@ class FFMpegReaderImplementation : public ReaderImplementation { public: FFMpegReaderImplementation(FileLocation *location, QByteArray *data); - bool readNextFrame() override; + bool readFramesTill(int64 ms) override; + uint64 framePresentationTime() const override; bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override; - int nextFrameDelay() override; bool start(Mode mode) override; int duration() const; @@ -48,6 +48,8 @@ public: ~FFMpegReaderImplementation(); private: + bool readNextFrame(); + enum class PacketResult { Ok, EndOfFile, @@ -93,6 +95,9 @@ private: int _nextFrameDelay = 0; int _currentFrameDelay = 0; + int64 _frameTime = 0; + int64 _frameTimeCorrection = 0; + }; } // namespace internal diff --git a/Telegram/SourceFiles/media/media_clip_implementation.h b/Telegram/SourceFiles/media/media_clip_implementation.h index c54fc4b7a..e2dd48361 100644 --- a/Telegram/SourceFiles/media/media_clip_implementation.h +++ b/Telegram/SourceFiles/media/media_clip_implementation.h @@ -28,7 +28,6 @@ namespace internal { class ReaderImplementation { public: - ReaderImplementation(FileLocation *location, QByteArray *data) : _location(location) , _data(data) { @@ -38,9 +37,16 @@ public: Silent, Normal, }; - virtual bool readNextFrame() = 0; + + // Read frames till current frame will have presentation time > ms. + virtual bool readFramesTill(int64 ms) = 0; + + // Get current frame presentation time. + virtual uint64 framePresentationTime() const = 0; + + // Render current frame to an image with specific size. 
virtual bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) = 0; - virtual int nextFrameDelay() = 0; + virtual bool start(Mode mode) = 0; virtual ~ReaderImplementation() { } diff --git a/Telegram/SourceFiles/media/media_clip_qtgif.cpp b/Telegram/SourceFiles/media/media_clip_qtgif.cpp index ff911c972..8117e5e74 100644 --- a/Telegram/SourceFiles/media/media_clip_qtgif.cpp +++ b/Telegram/SourceFiles/media/media_clip_qtgif.cpp @@ -28,6 +28,29 @@ namespace internal { QtGifReaderImplementation::QtGifReaderImplementation(FileLocation *location, QByteArray *data) : ReaderImplementation(location, data) { } +bool QtGifReaderImplementation::readFramesTill(int64 ms) { + if (!_frame.isNull() && _frameTime > ms) { + return true; + } + if (!readNextFrame()) { + return false; + } + if (_frameTime > ms) { + return true; + } + if (!readNextFrame()) { + return false; + } + if (_frameTime <= ms) { + _frameTime = ms + 5; // keep up + } + return true; +} + +uint64 QtGifReaderImplementation::framePresentationTime() const { + return static_cast(qMax(_frameTime, 0LL)); +} + bool QtGifReaderImplementation::readNextFrame() { if (_reader) _frameDelay = _reader->nextImageDelay(); if (_framesLeft < 1 && !jumpToStart()) { @@ -39,6 +62,7 @@ bool QtGifReaderImplementation::readNextFrame() { return false; } --_framesLeft; + _frameTime += _frameDelay; return true; } @@ -66,10 +90,6 @@ bool QtGifReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QS return true; } -int QtGifReaderImplementation::nextFrameDelay() { - return _frameDelay; -} - bool QtGifReaderImplementation::start(Mode mode) { if (mode == Mode::OnlyGifv) return false; return jumpToStart(); diff --git a/Telegram/SourceFiles/media/media_clip_qtgif.h b/Telegram/SourceFiles/media/media_clip_qtgif.h index 910d60e40..78205ded3 100644 --- a/Telegram/SourceFiles/media/media_clip_qtgif.h +++ b/Telegram/SourceFiles/media/media_clip_qtgif.h @@ -31,18 +31,20 @@ public: QtGifReaderImplementation(FileLocation *location, 
QByteArray *data); - bool readNextFrame() override; + bool readFramesTill(int64 ms) override; + uint64 framePresentationTime() const override; bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override; - int nextFrameDelay() override; bool start(Mode mode) override; ~QtGifReaderImplementation(); private: bool jumpToStart(); + bool readNextFrame(); QImageReader *_reader = nullptr; int _framesLeft = 0; + int64 _frameTime = 0; int _frameDelay = 0; QImage _frame; diff --git a/Telegram/SourceFiles/media/media_clip_reader.cpp b/Telegram/SourceFiles/media/media_clip_reader.cpp index 500275a5a..09d7d96f0 100644 --- a/Telegram/SourceFiles/media/media_clip_reader.cpp +++ b/Telegram/SourceFiles/media/media_clip_reader.cpp @@ -303,7 +303,7 @@ public: return error(); } if (frame() && frame()->original.isNull()) { - if (!_implementation->readNextFrame()) { + if (!_implementation->readFramesTill(-1)) { // Read the first frame. return error(); } if (!_implementation->renderFrame(frame()->original, frame()->alpha, QSize())) { @@ -330,34 +330,17 @@ public: } ProcessResult finishProcess(uint64 ms) { - if (!readNextFrame()) { - return error(); - } - if (ms >= _nextFrameWhen && !readNextFrame(true)) { + if (!_implementation->readFramesTill(ms - _animationStarted)) { return error(); } + _nextFrameWhen = _animationStarted + _implementation->framePresentationTime(); + if (!renderFrame()) { return error(); } return ProcessResult::CopyFrame; } - uint64 nextFrameDelay() { - int32 delay = _implementation->nextFrameDelay(); - return qMax(delay, 5); - } - - bool readNextFrame(bool keepup = false) { - if (!_implementation->readNextFrame()) { - return false; - } - _nextFrameWhen += nextFrameDelay(); - if (keepup) { - _nextFrameWhen = qMax(_nextFrameWhen, getms()); - } - return true; - } - bool renderFrame() { t_assert(frame() != 0 && _request.valid()); if (!_implementation->renderFrame(frame()->original, frame()->alpha, QSize(_request.framew, _request.frameh))) { @@ -394,6 +377,10 
@@ public: return _implementation->start(implementationMode()); } + void startedAt(uint64 ms) { + _animationStarted = _nextFrameWhen = ms; + } + ProcessResult error() { stop(); _state = State::Error; @@ -447,6 +434,7 @@ private: int _width = 0; int _height = 0; + uint64 _animationStarted = 0; uint64 _nextFrameWhen = 0; bool _paused = false; @@ -541,7 +529,8 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u if (result == ProcessResult::Started) { _loadLevel.fetchAndAddRelaxed(reader->_width * reader->_height - AverageGifSize); } - if (!reader->_paused && result == ProcessResult::Repaint) { + // See if we need to pause GIF because it is not displayed right now. + if (!reader->_paused && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) { int32 ishowing, iprevious; Reader::Frame *showing = it.key()->frameToShow(&ishowing), *previous = it.key()->frameToWriteNext(false, &iprevious); t_assert(previous != 0 && showing != 0 && ishowing >= 0 && iprevious >= 0); @@ -561,7 +550,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u frame->original = reader->frame()->original; frame->displayed.storeRelease(0); if (result == ProcessResult::Started) { - reader->_nextFrameWhen = ms; + reader->startedAt(ms); it.key()->moveToNextWrite(); emit callback(it.key(), it.key()->threadIndex(), NotificationReinit); } @@ -701,7 +690,7 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data auto reader = std_::make_unique<internal::FFMpegReaderImplementation>(&localloc, &localdata); if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv)) { bool hasAlpha = false; - if (reader->readNextFrame() && reader->renderFrame(cover, hasAlpha, QSize())) { + if (reader->readFramesTill(-1) && reader->renderFrame(cover, hasAlpha, QSize())) { if (cover.width() > 0 && cover.height() > 0 && cover.width() < cover.height() * 10 && cover.height() < cover.width() * 10) { if (hasAlpha) { QImage cacheForResize;