mirror of https://github.com/procxx/kepka.git
Support pause/resume in the video player in MediaView.
Autoplay video in MediaView only when opened via showDocument(); otherwise start paused.
commit fc716af002
parent 647759f0d1
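For orientation, the chain this commit wires up is: MediaView::onVideoPauseResume() toggles Reader::pauseResumeVideo(), the clip manager thread then calls ReaderPrivate::pauseVideo()/resumeVideo(), which shift the frame clock by the paused interval and forward to the reader implementation's pauseAudio()/resumeAudio(), ending in AudioPlayer::pauseFromVideo()/resumeFromVideo(). Below is a minimal, self-contained sketch of just the clock-shifting logic; AudioPlayerStub and ReaderPrivateStub are illustrative stand-ins, not the real classes from the diff.

// Schematic model of the pause/resume timing logic added in this commit;
// the real code lives in Media::Clip::ReaderPrivate and AudioPlayer.
#include <cstdint>
#include <iostream>

struct AudioPlayerStub { // stand-in for AudioPlayer
    void pauseFromVideo(uint64_t playId) { std::cout << "audio paused, playId " << playId << "\n"; }
    void resumeFromVideo(uint64_t playId) { std::cout << "audio resumed, playId " << playId << "\n"; }
};

struct ReaderPrivateStub { // stand-in for Media::Clip::ReaderPrivate
    AudioPlayerStub *audio = nullptr;
    uint64_t playId = 1;
    uint64_t videoPausedAtMs = 0; // 0 means "not paused", as in the patch
    uint64_t animationStarted = 0;
    uint64_t nextFrameWhen = 0;

    void pauseVideo(uint64_t ms) {
        if (videoPausedAtMs) return; // paused already
        videoPausedAtMs = ms;
        audio->pauseFromVideo(playId);
    }
    void resumeVideo(uint64_t ms) {
        if (!videoPausedAtMs) return; // not paused
        // Shift the frame clock by the time spent paused so playback continues seamlessly.
        int64_t delta = static_cast<int64_t>(ms) - static_cast<int64_t>(videoPausedAtMs);
        animationStarted += delta;
        nextFrameWhen += delta;
        videoPausedAtMs = 0;
        audio->resumeFromVideo(playId);
    }
};

int main() {
    AudioPlayerStub player;
    ReaderPrivateStub reader;
    reader.audio = &player;

    reader.pauseVideo(1000);  // user hits pause at t = 1000 ms
    reader.resumeVideo(4000); // resume at t = 4000 ms: frame clock shifted forward by 3000 ms
}

In the actual patch the toggle itself is an atomic request (_videoPauseRequest) read on the clip manager thread, as the diff below shows.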
@@ -2936,7 +2936,7 @@ void HistoryItem::clipCallback(Media::Clip::Notification notification) {
     switch (notification) {
     case NotificationReinit: {
         bool stopped = false;
-        if (reader->paused()) {
+        if (reader->autoPausedGif()) {
            if (MainWidget *m = App::main()) {
                 if (!m->isItemVisible(this)) { // stop animation if it is not visible
                     media->stopInline();
@@ -340,7 +340,7 @@ void Gif::clipCallback(Media::Clip::Notification notification) {
                 int32 height = st::inlineMediaHeight;
                 QSize frame = countFrameSize();
                 _gif->start(frame.width(), frame.height(), _width, height, false);
-            } else if (_gif->paused() && !Ui::isInlineItemVisible(this)) {
+            } else if (_gif->autoPausedGif() && !Ui::isInlineItemVisible(this)) {
                 delete _gif;
                 _gif = nullptr;
                 getShownDocument()->forget();
@@ -484,7 +484,7 @@ void AudioPlayer::play(const AudioMsgId &audio, int64 position) {
     if (stopped) emit updated(stopped);
 }
 
-void AudioPlayer::playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position) {
+void AudioPlayer::initFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position) {
     t_assert(audio.type() == AudioMsgId::Type::Video);
 
     auto type = audio.type();
@@ -514,7 +514,7 @@ void AudioPlayer::playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std
     }
     _loader->startFromVideo(current->videoPlayId);
 
-    current->playbackState.state = AudioPlayerPlaying;
+    current->playbackState.state = AudioPlayerPaused;
     current->loading = true;
     emit loaderOnStart(audio, position);
 }
@@ -539,6 +539,90 @@ void AudioPlayer::stopFromVideo(uint64 videoPlayId) {
     if (current) emit updated(current);
 }
 
+void AudioPlayer::pauseFromVideo(uint64 videoPlayId) {
+    AudioMsgId current;
+    {
+        QMutexLocker lock(&playerMutex);
+        auto type = AudioMsgId::Type::Video;
+        auto data = dataForType(type);
+        t_assert(data != nullptr);
+
+        if (data->videoPlayId != videoPlayId) {
+            return;
+        }
+
+        current = data->audio;
+        switch (data->playbackState.state) {
+        case AudioPlayerStarting:
+        case AudioPlayerResuming:
+        case AudioPlayerPlaying: {
+            data->playbackState.state = AudioPlayerPaused;
+            updateCurrentStarted(type);
+
+            ALint state = AL_INITIAL;
+            alGetSourcei(data->source, AL_SOURCE_STATE, &state);
+            if (!checkCurrentALError(type)) return;
+
+            if (state == AL_PLAYING) {
+                alSourcePause(data->source);
+                if (!checkCurrentALError(type)) return;
+            }
+        } break;
+        }
+        emit faderOnTimer();
+    }
+    if (current) emit updated(current);
+}
+
+void AudioPlayer::resumeFromVideo(uint64 videoPlayId) {
+    AudioMsgId current;
+    {
+        QMutexLocker lock(&playerMutex);
+        auto type = AudioMsgId::Type::Video;
+        auto data = dataForType(type);
+        t_assert(data != nullptr);
+
+        if (data->videoPlayId != videoPlayId) {
+            return;
+        }
+
+        float64 suppressGain = suppressSongGain * Global::VideoVolume();
+
+        current = data->audio;
+        switch (data->playbackState.state) {
+        case AudioPlayerPausing:
+        case AudioPlayerPaused:
+        case AudioPlayerPausedAtEnd: {
+            if (data->playbackState.state == AudioPlayerPaused) {
+                updateCurrentStarted(type);
+            } else if (data->playbackState.state == AudioPlayerPausedAtEnd) {
+                if (alIsSource(data->source)) {
+                    alSourcei(data->source, AL_SAMPLE_OFFSET, qMax(data->playbackState.position - data->skipStart, 0LL));
+                    if (!checkCurrentALError(type)) return;
+                }
+            }
+            data->playbackState.state = AudioPlayerPlaying;
+
+            ALint state = AL_INITIAL;
+            alGetSourcei(data->source, AL_SOURCE_STATE, &state);
+            if (!checkCurrentALError(type)) return;
+
+            if (state != AL_PLAYING) {
+                audioPlayer()->resumeDevice();
+
+                alSourcef(data->source, AL_GAIN, suppressGain);
+                if (!checkCurrentALError(type)) return;
+
+                alSourcePlay(data->source);
+                if (!checkCurrentALError(type)) return;
+            }
+        } break;
+        }
+        emit faderOnTimer();
+    }
+    if (current) emit updated(current);
+}
+
 void AudioPlayer::feedFromVideo(VideoSoundPart &&part) {
     _loader->feedFromVideo(std_::move(part));
 }
@@ -592,6 +676,7 @@ void AudioPlayer::pauseresume(AudioMsgId::Type type, bool fast) {
     switch (type) {
     case AudioMsgId::Type::Voice: suppressGain = suppressAllGain; break;
     case AudioMsgId::Type::Song: suppressGain = suppressSongGain * Global::SongVolume(); break;
+    case AudioMsgId::Type::Video: suppressGain = suppressSongGain * Global::VideoVolume(); break;
     }
 
     switch (current->playbackState.state) {
@@ -67,12 +67,14 @@ public:
     void stop(AudioMsgId::Type type);
 
     // Video player audio stream interface.
-    void playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position);
+    void initFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position);
     void feedFromVideo(VideoSoundPart &&part);
     int64 getVideoCorrectedTime(uint64 playId, uint64 systemMs);
     void videoSoundProgress(const AudioMsgId &audio);
     AudioPlaybackState currentVideoState(uint64 videoPlayId);
     void stopFromVideo(uint64 videoPlayId);
+    void pauseFromVideo(uint64 videoPlayId);
+    void resumeFromVideo(uint64 videoPlayId);
 
     void stopAndClear();
 
@@ -183,6 +183,18 @@ int64 FFMpegReaderImplementation::durationMs() const {
     return (_fmtContext->streams[_streamId]->duration * 1000LL * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den;
 }
 
+void FFMpegReaderImplementation::pauseAudio() {
+    if (_audioStreamId >= 0) {
+        audioPlayer()->pauseFromVideo(_playId);
+    }
+}
+
+void FFMpegReaderImplementation::resumeAudio() {
+    if (_audioStreamId >= 0) {
+        audioPlayer()->resumeFromVideo(_playId);
+    }
+}
+
 bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
     t_assert(_frameRead);
     _frameRead = false;
@@ -322,7 +334,7 @@ bool FFMpegReaderImplementation::start(Mode mode) {
         } else {
             soundData->length = (_fmtContext->streams[_audioStreamId]->duration * soundData->frequency * _fmtContext->streams[_audioStreamId]->time_base.num) / _fmtContext->streams[_audioStreamId]->time_base.den;
         }
-        audioPlayer()->playFromVideo(AudioMsgId(AudioMsgId::Type::Video), _playId, std_::move(soundData), 0);
+        audioPlayer()->initFromVideo(AudioMsgId(AudioMsgId::Type::Video), _playId, std_::move(soundData), 0);
     }
 
     return true;
@@ -333,7 +345,7 @@ QString FFMpegReaderImplementation::logData() const {
 }
 
 FFMpegReaderImplementation::~FFMpegReaderImplementation() {
-    if (_mode == Mode::Normal && _audioStreamId >= 0) {
+    if (_audioStreamId >= 0) {
         audioPlayer()->stopFromVideo(_playId);
     }
     if (_frameRead) {
@@ -38,13 +38,19 @@ public:
     FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId);
 
     ReadResult readFramesTill(int64 ms) override;
 
     int64 frameRealTime() const override;
     uint64 framePresentationTime() const override;
 
     bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
 
     int64 durationMs() const override;
     bool hasAudio() const override {
         return (_audioStreamId >= 0);
     }
+    void pauseAudio() override;
+    void resumeAudio() override;
 
     bool start(Mode mode) override;
 
     QString logData() const;
@@ -55,6 +55,8 @@ public:
 
     virtual int64 durationMs() const = 0;
     virtual bool hasAudio() const = 0;
+    virtual void pauseAudio() = 0;
+    virtual void resumeAudio() = 0;
 
     virtual bool start(Mode mode) = 0;
     virtual ~ReaderImplementation() {
@@ -32,13 +32,21 @@ public:
     QtGifReaderImplementation(FileLocation *location, QByteArray *data);
 
     ReadResult readFramesTill(int64 ms) override;
 
     int64 frameRealTime() const override;
     uint64 framePresentationTime() const override;
 
     bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
 
     int64 durationMs() const override;
     bool hasAudio() const override {
         return false;
     }
+    void pauseAudio() override {
+    }
+    void resumeAudio() override {
+    }
 
     bool start(Mode mode) override;
 
     ~QtGifReaderImplementation();
@@ -169,6 +169,9 @@ void Reader::moveToNextWrite() const {
     } else if (step == WaitingForRequestStep) {
     } else if (step == WaitingForFirstFrameStep) {
         _step.storeRelease(0);
+
+        // Force paint the first frame so moveToNextShow() is called.
+        _frames[0].displayed.storeRelease(0);
     } else if (step % 2) {
         _step.storeRelease((step + 1) % 6);
     }
@@ -206,8 +209,8 @@ QPixmap Reader::current(int32 framew, int32 frameh, int32 outerw, int32 outerh,
 
     if (ms) {
         frame->displayed.storeRelease(1);
-        if (_paused.loadAcquire()) {
-            _paused.storeRelease(0);
+        if (_autoPausedGif.loadAcquire()) {
+            _autoPausedGif.storeRelease(0);
             if (managers.size() <= _threadIndex) error();
             if (_state != State::Error) {
                 managers.at(_threadIndex)->update(this);
@@ -273,6 +276,18 @@ int64 Reader::getDurationMs() const {
     return ready() ? _durationMs : 0;
 }
 
+void Reader::pauseResumeVideo() {
+    if (managers.size() <= _threadIndex) error();
+    if (_state == State::Error) return;
+
+    _videoPauseRequest.storeRelease(1 - _videoPauseRequest.loadAcquire());
+    managers.at(_threadIndex)->start(this);
+}
+
+bool Reader::videoPaused() const {
+    return _videoPauseRequest.loadAcquire() != 0;
+}
+
 int32 Reader::width() const {
     return _width;
 }
@@ -352,8 +367,14 @@ public:
         if (!_request.valid()) {
             return start(ms);
         }
+        if (!_started) {
+            _started = true;
+            if (!_videoPausedAtMs) {
+                _implementation->resumeAudio();
+            }
+        }
 
-        if (!_paused && ms >= _nextFrameWhen) {
+        if (!_autoPausedGif && !_videoPausedAtMs && ms >= _nextFrameWhen) {
             return ProcessResult::Repaint;
         }
         return ProcessResult::Wait;
@@ -418,6 +439,24 @@ public:
         _animationStarted = _nextFrameWhen = ms;
     }
 
+    void pauseVideo(uint64 ms) {
+        if (_videoPausedAtMs) return; // Paused already.
+
+        _videoPausedAtMs = ms;
+        _implementation->pauseAudio();
+    }
+
+    void resumeVideo(uint64 ms) {
+        if (!_videoPausedAtMs) return; // Not paused.
+
+        int64 delta = static_cast<int64>(ms) - static_cast<int64>(_videoPausedAtMs);
+        _animationStarted += delta;
+        _nextFrameWhen += delta;
+
+        _videoPausedAtMs = 0;
+        _implementation->resumeAudio();
+    }
+
     ProcessResult error() {
         stop();
         _state = State::Error;
@@ -479,7 +518,9 @@ private:
     uint64 _nextFrameWhen = 0;
     int64 _nextFramePositionMs = 0;
 
-    bool _paused = false;
+    bool _autoPausedGif = false;
+    bool _started = false;
+    uint64 _videoPausedAtMs = 0;
 
     friend class Manager;
 
@@ -580,14 +621,14 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u
         it.key()->_hasAudio = reader->_hasAudio;
     }
     // See if we need to pause GIF because it is not displayed right now.
-    if (!reader->_paused && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) {
+    if (!reader->_autoPausedGif && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) {
         int32 ishowing, iprevious;
         Reader::Frame *showing = it.key()->frameToShow(&ishowing), *previous = it.key()->frameToWriteNext(false, &iprevious);
         t_assert(previous != 0 && showing != 0 && ishowing >= 0 && iprevious >= 0);
         if (reader->_frames[ishowing].when > 0 && showing->displayed.loadAcquire() <= 0) { // current frame was not shown
             if (reader->_frames[ishowing].when + WaitBeforeGifPause < ms || (reader->_frames[iprevious].when && previous->displayed.loadAcquire() <= 0)) {
-                reader->_paused = true;
-                it.key()->_paused.storeRelease(1);
+                reader->_autoPausedGif = true;
+                it.key()->_autoPausedGif.storeRelease(1);
                 result = ProcessResult::Paused;
             }
         }
@@ -668,8 +709,13 @@ void Manager::process() {
             _readers.insert(it.key()->_private, 0);
         } else {
             i.value() = ms;
-            if (i.key()->_paused && !it.key()->_paused.loadAcquire()) {
-                i.key()->_paused = false;
+            if (i.key()->_autoPausedGif && !it.key()->_autoPausedGif.loadAcquire()) {
+                i.key()->_autoPausedGif = false;
+            }
+            if (it.key()->_videoPauseRequest.loadAcquire()) {
+                i.key()->pauseVideo(ms);
+            } else {
+                i.key()->resumeVideo(ms);
             }
         }
         Reader::Frame *frame = it.key()->frameToWrite();
@@ -691,9 +737,15 @@ void Manager::process() {
                 return;
             }
             ms = getms();
-            i.value() = reader->_nextFrameWhen ? reader->_nextFrameWhen : (ms + 86400 * 1000ULL);
+            if (reader->_videoPausedAtMs) {
+                i.value() = ms + 86400 * 1000ULL;
+            } else if (reader->_nextFrameWhen && reader->_started) {
+                i.value() = reader->_nextFrameWhen;
+            } else {
+                i.value() = (ms + 86400 * 1000ULL);
+            }
         }
-        if (!reader->_paused && i.value() < minms) {
+        if (!reader->_autoPausedGif && i.value() < minms) {
             minms = i.value();
         }
         ++i;
@@ -85,9 +85,10 @@ public:
         Frame *frame = frameToShow();
         return frame ? (frame->displayed.loadAcquire() != 0) : true;
     }
-    bool paused() const {
-        return _paused.loadAcquire();
+    bool autoPausedGif() const {
+        return _autoPausedGif.loadAcquire();
     }
+    bool videoPaused() const;
     int threadIndex() const {
         return _threadIndex;
     }
@@ -105,6 +106,7 @@ public:
     bool hasAudio() const;
     int64 getPositionMs() const;
     int64 getDurationMs() const;
+    void pauseResumeVideo();
 
     void stop();
     void error();
@@ -153,7 +155,8 @@ private:
     void moveToNextShow() const;
     void moveToNextWrite() const;
 
-    QAtomicInt _paused = 0;
+    QAtomicInt _autoPausedGif = 0;
+    QAtomicInt _videoPauseRequest = 0;
     int32 _threadIndex;
 
     bool _autoplay = false;
@@ -222,6 +222,10 @@ bool MediaView::fileShown() const {
 bool MediaView::gifShown() const {
     if (_gif && _gif->ready()) {
         if (!_gif->started()) {
+            if (_doc->isVideo() && _autoplayVideoDocument != _doc && !_gif->videoPaused()) {
+                _gif->pauseResumeVideo();
+                const_cast<MediaView*>(this)->_videoPaused = _gif->videoPaused();
+            }
             _gif->start(_gif->width(), _gif->height(), _gif->width(), _gif->height(), false);
             const_cast<MediaView*>(this)->_current = QPixmap();
         }
@@ -1023,6 +1027,9 @@ void MediaView::showDocument(DocumentData *doc, HistoryItem *context) {
         _overview = doc->isVideo() ? OverviewVideos : OverviewFiles;
         findCurrent();
     }
+    if (doc->isVideo()) {
+        _autoplayVideoDocument = doc;
+    }
     displayDocument(doc, context);
     preloadData(0);
     activateControls();
@@ -1090,6 +1097,10 @@ void MediaView::displayDocument(DocumentData *doc, HistoryItem *item) { // empty
     _photo = nullptr;
     _radial.stop();
 
+    if (_autoplayVideoDocument && _doc != _autoplayVideoDocument) {
+        _autoplayVideoDocument = nullptr;
+    }
+
     _current = QPixmap();
 
     _caption = Text();
@@ -1262,8 +1273,8 @@ void MediaView::createClipController() {
     setClipControllerGeometry();
     _clipController->show();
 
-    connect(_clipController, SIGNAL(playPressed()), this, SLOT(onVideoPlay()));
-    connect(_clipController, SIGNAL(pausePressed()), this, SLOT(onVideoPause()));
+    connect(_clipController, SIGNAL(playPressed()), this, SLOT(onVideoPauseResume()));
+    connect(_clipController, SIGNAL(pausePressed()), this, SLOT(onVideoPauseResume()));
     connect(_clipController, SIGNAL(seekProgress(int64)), this, SLOT(onVideoSeekProgress(int64)));
     connect(_clipController, SIGNAL(seekFinished(int64)), this, SLOT(onVideoSeekFinished(int64)));
     connect(_clipController, SIGNAL(volumeChanged(float64)), this, SLOT(onVideoVolumeChanged(float64)));
@@ -1290,11 +1301,13 @@ void MediaView::setClipControllerGeometry() {
     myEnsureResized(_clipController);
 }
 
-void MediaView::onVideoPlay() {
+void MediaView::onVideoPauseResume() {
     if (auto item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) {
         if (_gif->state() == Media::Clip::State::Error) {
             displayDocument(_doc, item);
         } else if (_gif->state() == Media::Clip::State::Finished) {
+            _autoplayVideoDocument = _doc;
+
             _current = _gif->current(_gif->width(), _gif->height(), _gif->width(), _gif->height(), getms());
             _gif = std_::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), func(this, &MediaView::clipCallback), Media::Clip::Reader::Mode::Video);
 
@@ -1309,17 +1322,17 @@ void MediaView::onVideoPlay() {
             state.frequency = _videoFrequencyMs;
             updateVideoPlaybackState(state);
         } else {
-            //
+            _gif->pauseResumeVideo();
+            _videoPaused = _gif->videoPaused();
+            if (_videoIsSilent) {
+                updateSilentVideoPlaybackState();
+            }
         }
     } else {
         stopGif();
     }
 }
 
-void MediaView::onVideoPause() {
-
-}
-
 void MediaView::onVideoSeekProgress(int64 position) {
 
 }
@@ -1342,11 +1355,10 @@ void MediaView::onVideoFromFullScreen() {
 }
 
 void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
-    if (audioId.type() != AudioMsgId::Type::Video) {
+    if (audioId.type() != AudioMsgId::Type::Video || !_gif) {
         return;
     }
 
-    t_assert(_gif != nullptr);
     t_assert(audioPlayer() != nullptr);
     auto state = audioPlayer()->currentVideoState(_gif->playId());
     updateVideoPlaybackState(state);
@@ -1690,7 +1702,7 @@ void MediaView::keyPressEvent(QKeyEvent *e) {
         if (_doc && !_doc->loading() && !fileShown()) {
             onDocClick();
         } else if (_doc->isVideo()) {
-            onVideoPlay();
+            onVideoPauseResume();
         }
     } else if (e->key() == Qt::Key_Left) {
         moveToNext(-1);
@@ -117,8 +117,7 @@ protected:
     bool eventFilter(QObject *obj, QEvent *e) override;
 
 private slots:
-    void onVideoPlay();
-    void onVideoPause();
+    void onVideoPauseResume();
     void onVideoSeekProgress(int64 position);
     void onVideoSeekFinished(int64 position);
     void onVideoVolumeChanged(float64 volume);
@@ -184,6 +183,7 @@ private:
     QString _headerText;
 
     ChildWidget<Media::Clip::Controller> _clipController = { nullptr };
+    DocumentData *_autoplayVideoDocument = nullptr;
 
     Text _caption;
     QRect _captionRect;
@@ -1260,7 +1260,7 @@ public:
     }
 
     explicit operator bool() const {
-        return _audio;
+        return _audio || (_type == Type::Video);
     }
 
 private: