mirror of https://github.com/procxx/kepka.git

commit 034657dd2c (parent 01d448c1bd)
Video play progress displayed in MediaView (in case no audio stream).

@@ -169,10 +169,19 @@ bool FFMpegReaderImplementation::readFramesTill(int64 ms) {
 	}
 }
 
+int64 FFMpegReaderImplementation::frameRealTime() const {
+	return _frameMs;
+}
+
 uint64 FFMpegReaderImplementation::framePresentationTime() const {
 	return static_cast<uint64>(qMax(_frameTime + _frameTimeCorrection, 0LL));
 }
 
+int64 FFMpegReaderImplementation::durationMs() const {
+	if (_fmtContext->streams[_streamId]->duration == AV_NOPTS_VALUE) return 0;
+	return (_fmtContext->streams[_streamId]->duration * 1000LL * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den;
+}
+
 bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
 	t_assert(_frameRead);
 	_frameRead = false;
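
Note (illustrative, not part of the commit): the durationMs() added above rescales the stream duration from AVStream::time_base units into milliseconds. A minimal standalone sketch of the same arithmetic, with made-up sample values:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical stream values: 90 kHz time base, 900000 ticks of duration.
        int64_t duration = 900000;    // in time_base units
        int64_t num = 1, den = 90000; // time_base = num / den seconds per tick
        // Same formula as in the diff: ticks * 1000 * num / den -> milliseconds.
        int64_t ms = (duration * 1000LL * num) / den;
        std::printf("%lld ms\n", static_cast<long long>(ms)); // prints "10000 ms"
        return 0;
    }
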
@@ -322,11 +331,6 @@ QString FFMpegReaderImplementation::logData() const {
 	return qsl("for file '%1', data size '%2'").arg(_location ? _location->name() : QString()).arg(_data->size());
 }
 
-int FFMpegReaderImplementation::duration() const {
-	if (_fmtContext->streams[_streamId]->duration == AV_NOPTS_VALUE) return 0;
-	return (_fmtContext->streams[_streamId]->duration * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den;
-}
-
 FFMpegReaderImplementation::~FFMpegReaderImplementation() {
 	if (_mode == Mode::Normal && _audioStreamId >= 0) {
 		audioPlayer()->stop(AudioMsgId::Type::Video);
@@ -38,11 +38,12 @@ public:
 	FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId);
 
 	bool readFramesTill(int64 ms) override;
+	int64 frameRealTime() const override;
 	uint64 framePresentationTime() const override;
 	bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
+	int64 durationMs() const override;
 	bool start(Mode mode) override;
 
-	int duration() const;
 	QString logData() const;
 
 	~FFMpegReaderImplementation();
@@ -41,12 +41,15 @@ public:
 	// Read frames till current frame will have presentation time > ms.
 	virtual bool readFramesTill(int64 ms) = 0;
 
-	// Get current frame presentation time.
+	// Get current frame real and presentation time.
+	virtual int64 frameRealTime() const = 0;
 	virtual uint64 framePresentationTime() const = 0;
 
 	// Render current frame to an image with specific size.
 	virtual bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) = 0;
 
+	virtual int64 durationMs() const = 0;
+
 	virtual bool start(Mode mode) = 0;
 	virtual ~ReaderImplementation() {
 	}
@@ -47,6 +47,10 @@ bool QtGifReaderImplementation::readFramesTill(int64 ms) {
 	return true;
 }
 
+int64 QtGifReaderImplementation::frameRealTime() const {
+	return _frameRealTime;
+}
+
 uint64 QtGifReaderImplementation::framePresentationTime() const {
 	return static_cast<uint64>(qMax(_frameTime, 0LL));
 }
@@ -63,6 +67,7 @@ bool QtGifReaderImplementation::readNextFrame() {
 	}
 	--_framesLeft;
 	_frameTime += _frameDelay;
+	_frameRealTime += _frameDelay;
 	return true;
 }
 
@@ -90,6 +95,10 @@ bool QtGifReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
 	return true;
 }
 
+int64 QtGifReaderImplementation::durationMs() const {
+	return 0; // not supported
+}
+
 bool QtGifReaderImplementation::start(Mode mode) {
 	if (mode == Mode::OnlyGifv) return false;
 	return jumpToStart();
@@ -32,8 +32,10 @@ public:
 	QtGifReaderImplementation(FileLocation *location, QByteArray *data);
 
 	bool readFramesTill(int64 ms) override;
+	int64 frameRealTime() const override;
 	uint64 framePresentationTime() const override;
 	bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
+	int64 durationMs() const override;
 	bool start(Mode mode) override;
 
 	~QtGifReaderImplementation();
@ -44,6 +46,7 @@ private:
|
||||||
|
|
||||||
QImageReader *_reader = nullptr;
|
QImageReader *_reader = nullptr;
|
||||||
int _framesLeft = 0;
|
int _framesLeft = 0;
|
||||||
|
int64 _frameRealTime = 0;
|
||||||
int64 _frameTime = 0;
|
int64 _frameTime = 0;
|
||||||
int _frameDelay = 0;
|
int _frameDelay = 0;
|
||||||
QImage _frame;
|
QImage _frame;
|
||||||
|
|
|
@@ -112,7 +112,7 @@ Reader::Frame *Reader::frameToShow(int32 *index) const { // 0 means not ready
 	int32 step = _step.loadAcquire(), i;
 	if (step == WaitingForDimensionsStep) {
 		if (index) *index = 0;
-		return 0;
+		return nullptr;
 	} else if (step == WaitingForRequestStep) {
 		i = 0;
 	} else if (step == WaitingForFirstFrameStep) {
@@ -130,7 +130,7 @@ Reader::Frame *Reader::frameToWrite(int32 *index) const { // 0 means not ready
 		i = 0;
 	} else if (step == WaitingForRequestStep) {
 		if (index) *index = 0;
-		return 0;
+		return nullptr;
 	} else if (step == WaitingForFirstFrameStep) {
 		i = 0;
 	} else {
@@ -144,7 +144,7 @@ Reader::Frame *Reader::frameToWriteNext(bool checkNotWriting, int32 *index) const {
 	int32 step = _step.loadAcquire(), i;
 	if (step == WaitingForDimensionsStep || step == WaitingForRequestStep || (checkNotWriting && (step % 2))) {
 		if (index) *index = 0;
-		return 0;
+		return nullptr;
 	}
 	i = ((step + 4) / 2) % 3;
 	if (index) *index = i;
@@ -258,6 +258,21 @@ bool Reader::ready() const {
 	return false;
 }
 
+bool Reader::hasAudio() const {
+	return ready() ? _hasAudio : false;
+}
+
+int64 Reader::getPositionMs() const {
+	if (auto frame = frameToShow()) {
+		return frame->positionMs;
+	}
+	return 0;
+}
+
+int64 Reader::getDurationMs() const {
+	return ready() ? _durationMs : 0;
+}
+
 int32 Reader::width() const {
 	return _width;
 }
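
Note (illustrative, not part of the commit): these accessors give the UI a pull-style API; MediaView polls them on clip notifications further down in this commit. A hedged, non-compiling sketch of the polling pattern, assuming a started Media::Clip::Reader named "reader":

    // Hypothetical usage of the new accessors.
    if (reader->ready() && !reader->hasAudio()) {
        int64 positionMs = reader->getPositionMs(); // position of the currently shown frame
        int64 durationMs = reader->getDurationMs(); // 0 until the reader is ready
        // feed positionMs / durationMs into the playback controller
    }
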
@@ -313,6 +328,7 @@ public:
 		}
 		_width = frame()->original.width();
 		_height = frame()->original.height();
+		_durationMs = _implementation->durationMs();
 		return ProcessResult::Started;
 	}
 	return ProcessResult::Wait;
@@ -335,6 +351,7 @@ public:
 		if (!_implementation->readFramesTill(ms - _animationStarted)) {
 			return error();
 		}
+		_nextFramePositionMs = _implementation->frameRealTime();
 		_nextFrameWhen = _animationStarted + _implementation->framePresentationTime();
 
 		if (!renderFrame()) {
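
Note (sketch, not from the commit): the two timestamps read above play different roles. frameRealTime() reports where the frame sits inside the media and feeds positionMs (what the progress bar shows), while framePresentationTime() is the offset from the animation start at which the frame should go on screen:

    // Roughly, in the reader thread (names as in the diff above):
    _nextFramePositionMs = _implementation->frameRealTime();        // media position, in ms
    _nextFrameWhen = _animationStarted
        + _implementation->framePresentationTime();                 // wall-clock show time
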
@@ -352,6 +369,7 @@ public:
 		frame()->pix = QPixmap();
 		frame()->pix = _prepareFrame(_request, frame()->original, frame()->alpha, frame()->cache);
 		frame()->when = _nextFrameWhen;
+		frame()->positionMs = _nextFramePositionMs;
 		return true;
 	}
 
@@ -427,6 +445,9 @@ private:
 		QImage original, cache;
 		bool alpha = true;
 		uint64 when = 0;
+
+		// Counted from the end, so that positionMs <= durationMs despite keep up delays.
+		int64 positionMs = 0;
 	};
 	Frame _frames[3];
 	int _frame = 0;
@@ -437,8 +458,10 @@ private:
 	int _width = 0;
 	int _height = 0;
 
+	int64 _durationMs = 0;
 	uint64 _animationStarted = 0;
 	uint64 _nextFrameWhen = 0;
+	int64 _nextFramePositionMs = 0;
 
 	bool _paused = false;
 
@@ -531,6 +554,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, uint64 ms) {
 
 	if (result == ProcessResult::Started) {
 		_loadLevel.fetchAndAddRelaxed(reader->_width * reader->_height - AverageGifSize);
+		it.key()->_durationMs = reader->_durationMs;
 	}
 	// See if we need to pause GIF because it is not displayed right now.
 	if (!reader->_paused && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) {
@@ -552,6 +576,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, uint64 ms) {
 			frame->pix = reader->frame()->pix;
 			frame->original = reader->frame()->original;
 			frame->displayed.storeRelease(0);
+			frame->positionMs = reader->frame()->positionMs;
 			if (result == ProcessResult::Started) {
 				reader->startedAt(ms);
 				it.key()->moveToNextWrite();
@@ -704,7 +729,7 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data
 			request.factor = 1;
 			cover = _prepareFrame(request, cover, hasAlpha, cacheForResize).toImage();
 		}
-		int duration = reader->duration();
+		int duration = reader->durationMs() / 1000;
 		return MTP_documentAttributeVideo(MTP_int(duration), MTP_int(cover.width()), MTP_int(cover.height()));
 	}
 }
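
Note (illustrative): the seconds value passed to MTP_documentAttributeVideo is now derived from the millisecond duration. Integer division truncates toward zero, so whole seconds are rounded down just as with the removed seconds-based duration():

    // Hypothetical values.
    // reader->durationMs() ==  9999  ->  9999 / 1000 ==  9 seconds
    // reader->durationMs() == 30042  -> 30042 / 1000 == 30 seconds
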
@@ -101,6 +101,10 @@ public:
 	}
 	bool ready() const;
 
+	bool hasAudio() const;
+	int64 getPositionMs() const;
+	int64 getDurationMs() const;
+
 	void stop();
 	void error();
 
@@ -118,6 +122,8 @@ private:
 	State _state = State::Reading;
 
 	uint64 _playId;
+	bool _hasAudio = false;
+	int64 _durationMs = 0;
 
 	mutable int _width = 0;
 	mutable int _height = 0;
@@ -125,8 +131,6 @@ private:
 	// -2, -1 - init, 0-5 - work, show ((state + 1) / 2) % 3 state, write ((state + 3) / 2) % 3
 	mutable QAtomicInt _step = WaitingForDimensionsStep;
 	struct Frame {
-		Frame() : displayed(false) {
-		}
 		void clear() {
 			pix = QPixmap();
 			original = QImage();
@@ -134,7 +138,11 @@ private:
 		QPixmap pix;
 		QImage original;
 		FrameRequest request;
-		QAtomicInt displayed;
+		QAtomicInt displayed = 0;
+
+		// Should be counted from the end,
+		// so that positionMs <= _durationMs.
+		int64 positionMs = 0;
 	};
 	mutable Frame _frames[3];
 	Frame *frameToShow(int *index = 0) const; // 0 means not ready
@@ -44,7 +44,9 @@ void Playback::updateState(const AudioPlaybackState &playbackState) {
 	}
 
 	float64 progress = 0.;
-	if (duration) {
+	if (position > duration) {
+		progress = 1.;
+	} else if (duration) {
 		progress = duration ? snap(float64(position) / duration, 0., 1.) : 0.;
 	}
 	if (duration != _duration || position != _position) {
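
Note (illustrative, not part of the commit): the extra branch above guards against a position that overshoots the reported duration, which presumably can happen for silent video where position comes from frame timestamps while duration comes from container metadata. A minimal standalone sketch of the resulting mapping, with hypothetical values:

    #include <algorithm>
    #include <cstdio>

    // Same clamping idea as in Playback::updateState above.
    double progressFor(long long position, long long duration) {
        if (position > duration) return 1.;
        if (duration) return std::min(std::max(double(position) / duration, 0.), 1.);
        return 0.;
    }

    int main() {
        std::printf("%f\n", progressFor(5000, 10000));  // 0.5
        std::printf("%f\n", progressFor(10040, 10000)); // 1.0 (overshoot clamps to full)
        std::printf("%f\n", progressFor(0, 0));         // 0.0 (unknown duration)
        return 0;
    }
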
@@ -701,10 +701,16 @@ void MediaView::clipCallback(Media::Clip::Notification notification) {
 
 	switch (notification) {
 	case NotificationReinit: {
-		if (HistoryItem *item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) {
+		if (auto item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) {
 			if (_gif->state() == State::Error) {
 				_current = QPixmap();
 			}
+			_videoIsSilent = _doc->isVideo() && !_gif->hasAudio();
+			if (_videoIsSilent) {
+				_videoDurationMs = _gif->getDurationMs();
+				_videoPositionMs = _gif->getPositionMs();
+				updateSilentVideoPlaybackState();
+			}
 			displayDocument(_doc, item);
 		} else {
 			stopGif();
@@ -713,6 +719,10 @@ void MediaView::clipCallback(Media::Clip::Notification notification) {
 
 	case NotificationRepaint: {
 		if (!_gif->currentDisplayed()) {
+			if (_videoIsSilent) {
+				_videoPositionMs = _gif->getPositionMs();
+				updateSilentVideoPlaybackState();
+			}
 			update(_x, _y, _w, _h);
 		}
 	} break;
@@ -1230,6 +1240,11 @@ void MediaView::createClipReader() {
 	auto mode = _doc->isVideo() ? Media::Clip::Reader::Mode::Video : Media::Clip::Reader::Mode::Gif;
 	_gif = std_::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), func(this, &MediaView::clipCallback), mode);
 
+	// Correct values will be set when gif gets inited.
+	_videoIsSilent = false;
+	_videoPositionMs = 0ULL;
+	_videoDurationMs = _doc->duration() * 1000ULL;
+
 	createClipController();
 }
 
@@ -1305,11 +1320,30 @@ void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
 	t_assert(_gif != nullptr);
 	t_assert(audioPlayer() != nullptr);
 	auto state = audioPlayer()->currentVideoState(_gif->playId());
+	updateVideoPlaybackState(state);
+}
+
+void MediaView::updateVideoPlaybackState(const AudioPlaybackState &state) {
 	if (state.frequency) {
 		_clipController->updatePlayback(state);
 	}
 }
 
+void MediaView::updateSilentVideoPlaybackState() {
+	AudioPlaybackState state;
+	if (_videoPaused) {
+		state.state = AudioPlayerPaused;
+	} else if (_videoPositionMs == _videoDurationMs) {
+		state.state = AudioPlayerStoppedAtEnd;
+	} else {
+		state.state = AudioPlayerPlaying;
+	}
+	state.position = _videoPositionMs;
+	state.duration = _videoDurationMs;
+	state.frequency = _videoFrequencyMs;
+	updateVideoPlaybackState(state);
+}
+
 void MediaView::paintEvent(QPaintEvent *e) {
 	QRect r(e->rect());
 	QRegion region(e->region());
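
Note (sketch, not from the commit): updateSilentVideoPlaybackState() synthesizes an AudioPlaybackState so the existing clip controller, which normally consumes audio-player state, can draw progress for videos with no audio stream. Because frequency is set to 1000, a consumer that divides position by frequency gets seconds, the same as for real audio state. Hypothetical consumer-side arithmetic:

    // With _videoFrequencyMs == 1000, positions and durations are in milliseconds.
    int64 positionSeconds = state.position / state.frequency; // e.g.  7500 / 1000 ==  7
    int64 durationSeconds = state.duration / state.frequency; // e.g. 30000 / 1000 == 30
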
@@ -28,6 +28,8 @@ class Controller;
 } // namespace Clip
 } // namespace Media
 
+struct AudioPlaybackState;
+
 class MediaView : public TWidget, public RPCSender, public ClickHandlerHost {
 	Q_OBJECT
 
@@ -130,6 +132,9 @@ private:
 	void findCurrent();
 	void loadBack();
 
+	void updateVideoPlaybackState(const AudioPlaybackState &state);
+	void updateSilentVideoPlaybackState();
+
 	void createClipController();
 	void setClipControllerGeometry();
 
@@ -197,6 +202,13 @@ private:
 	std_::unique_ptr<Media::Clip::Reader> _gif;
 	int32 _full = -1; // -1 - thumb, 0 - medium, 1 - full
 
+	// Video without audio stream playback information.
+	bool _videoIsSilent = false;
+	bool _videoPaused = false;
+	int64 _videoPositionMs = 0;
+	int64 _videoDurationMs = 0;
+	int32 _videoFrequencyMs = 1000; // 1000 ms per second.
+
 	bool fileShown() const;
 	bool gifShown() const;
 	void stopGif();