Optimized video frame pushing.

This commit is contained in:
John Preston 2019-03-07 17:23:19 +04:00
parent 5c4b459f57
commit a56a12a1ef
9 changed files with 143 additions and 54 deletions

View File

@ -500,8 +500,16 @@ bool Sandbox::notify(QObject *receiver, QEvent *e) {
const auto wrap = createEventNestingLevel(); const auto wrap = createEventNestingLevel();
const auto type = e->type(); const auto type = e->type();
if ((type == QEvent::UpdateRequest) && _application) { if (type == QEvent::UpdateRequest) {
_application->animationManager().update(); _widgetUpdateRequests.fire({});
// Profiling.
//const auto time = crl::now();
//LOG(("[%1] UPDATE STARTED").arg(time));
//const auto guard = gsl::finally([&] {
// const auto now = crl::now();
// LOG(("[%1] UPDATE FINISHED (%2)").arg(now).arg(now - time));
//});
//return QApplication::notify(receiver, e);
} }
return QApplication::notify(receiver, e); return QApplication::notify(receiver, e);
} }
@ -550,6 +558,10 @@ void Sandbox::resumeDelayedWindowActivations() {
_delayedActivationsPaused = false; _delayedActivationsPaused = false;
} }
rpl::producer<> Sandbox::widgetUpdateRequests() const {
return _widgetUpdateRequests.events();
}
ProxyData Sandbox::sandboxProxy() const { ProxyData Sandbox::sandboxProxy() const {
return _sandboxProxy; return _sandboxProxy;
} }

View File

@ -39,6 +39,8 @@ public:
void pauseDelayedWindowActivations(); void pauseDelayedWindowActivations();
void resumeDelayedWindowActivations(); void resumeDelayedWindowActivations();
rpl::producer<> widgetUpdateRequests() const;
ProxyData sandboxProxy() const; ProxyData sandboxProxy() const;
static Sandbox &Instance() { static Sandbox &Instance() {
@ -109,6 +111,8 @@ private:
QByteArray _lastCrashDump; QByteArray _lastCrashDump;
ProxyData _sandboxProxy; ProxyData _sandboxProxy;
rpl::event_stream<> _widgetUpdateRequests;
}; };
} // namespace Core } // namespace Core

View File

@ -13,6 +13,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_video_track.h" #include "media/streaming/media_streaming_video_track.h"
#include "media/audio/media_audio.h" // for SupportsSpeedControl() #include "media/audio/media_audio.h" // for SupportsSpeedControl()
#include "data/data_document.h" // for DocumentData::duration() #include "data/data_document.h" // for DocumentData::duration()
#include "core/sandbox.h" // for widgetUpdateRequests() producer
namespace Media { namespace Media {
namespace Streaming { namespace Streaming {
@ -81,25 +82,38 @@ Player::Player(
std::unique_ptr<Loader> loader) std::unique_ptr<Loader> loader)
: _file(std::make_unique<File>(owner, std::move(loader))) : _file(std::make_unique<File>(owner, std::move(loader)))
, _remoteLoader(_file->isRemoteLoader()) , _remoteLoader(_file->isRemoteLoader())
, _renderFrameTimer([=] { checkNextFrame(); }) { , _renderFrameTimer([=] { checkNextFrameRender(); }) {
} }
not_null<FileDelegate*> Player::delegate() { not_null<FileDelegate*> Player::delegate() {
return static_cast<FileDelegate*>(this); return static_cast<FileDelegate*>(this);
} }
void Player::checkNextFrame() { void Player::checkNextFrameRender() {
Expects(_nextFrameTime != kTimeUnknown); Expects(_nextFrameTime != kTimeUnknown);
Expects(!_renderFrameTimer.isActive());
const auto now = crl::now(); const auto now = crl::now();
if (now < _nextFrameTime) { if (now < _nextFrameTime) {
_renderFrameTimer.callOnce(_nextFrameTime - now); if (!_renderFrameTimer.isActive()) {
_renderFrameTimer.callOnce(_nextFrameTime - now);
}
} else { } else {
_renderFrameTimer.cancel();
_nextFrameTime = kTimeUnknown;
renderFrame(now); renderFrame(now);
} }
} }
void Player::checkNextFrameAvailability() {
Expects(_video != nullptr);
_nextFrameTime = _video->nextFrameDisplayTime();
if (_nextFrameTime != kTimeUnknown) {
LOG(("[%2] RENDERING AT: %1").arg(_nextFrameTime).arg(crl::now()));
checkVideoStep();
}
}
void Player::renderFrame(crl::time now) { void Player::renderFrame(crl::time now) {
Expects(_video != nullptr); Expects(_video != nullptr);
@ -454,6 +468,7 @@ void Player::updatePausedState() {
if (!_paused && _stage == Stage::Ready) { if (!_paused && _stage == Stage::Ready) {
const auto guard = base::make_weak(&_sessionGuard); const auto guard = base::make_weak(&_sessionGuard);
start(); start();
LOG(("[%1] STARTED.").arg(crl::now()));
if (!guard) { if (!guard) {
return; return;
} }
@ -548,19 +563,24 @@ void Player::start() {
} }
if (guard && _video) { if (guard && _video) {
_video->renderNextFrame( _video->checkNextFrame(
) | rpl::start_with_next_done([=](crl::time when) { ) | rpl::start_with_next_done([=] {
_nextFrameTime = when; checkVideoStep();
LOG(("RENDERING AT: %1").arg(when));
checkNextFrame();
}, [=] { }, [=] {
Expects(_stage == Stage::Started); Assert(_stage == Stage::Started);
_videoFinished = true; _videoFinished = true;
if (!_audio || _audioFinished) { if (!_audio || _audioFinished) {
_updates.fire({ Finished() }); _updates.fire({ Finished() });
} }
}, _sessionLifetime); }, _sessionLifetime);
Core::Sandbox::Instance().widgetUpdateRequests(
) | rpl::filter([=] {
return !_videoFinished;
}) | rpl::start_with_next([=] {
checkVideoStep();
}, _sessionLifetime);
} }
if (guard && _audio) { if (guard && _audio) {
trackSendReceivedTill(*_audio, _information.audio.state); trackSendReceivedTill(*_audio, _information.audio.state);
@ -570,6 +590,14 @@ void Player::start() {
} }
} }
void Player::checkVideoStep() {
if (_nextFrameTime != kTimeUnknown) {
checkNextFrameRender();
} else {
checkNextFrameAvailability();
}
}
void Player::stop() { void Player::stop() {
_file->stop(); _file->stop();
_sessionLifetime = rpl::lifetime(); _sessionLifetime = rpl::lifetime();

View File

@ -91,7 +91,9 @@ private:
void start(); void start();
void provideStartInformation(); void provideStartInformation();
void fail(Error error); void fail(Error error);
void checkNextFrame(); void checkVideoStep();
void checkNextFrameRender();
void checkNextFrameAvailability();
void renderFrame(crl::time now); void renderFrame(crl::time now);
void audioReceivedTill(crl::time position); void audioReceivedTill(crl::time position);
void audioPlayedTill(crl::time position); void audioPlayedTill(crl::time position);

View File

@ -36,7 +36,7 @@ public:
void process(Packet &&packet); void process(Packet &&packet);
[[nodiscard]] rpl::producer<crl::time> displayFrameAt() const; [[nodiscard]] rpl::producer<> checkNextFrame() const;
[[nodiscard]] rpl::producer<> waitingForData() const; [[nodiscard]] rpl::producer<> waitingForData() const;
void pause(crl::time time); void pause(crl::time time);
@ -91,8 +91,7 @@ private:
crl::time _resumedTime = kTimeUnknown; crl::time _resumedTime = kTimeUnknown;
mutable TimePoint _syncTimePoint; mutable TimePoint _syncTimePoint;
crl::time _framePositionShift = 0; crl::time _framePositionShift = 0;
crl::time _nextFrameDisplayTime = kTimeUnknown; rpl::event_stream<> _checkNextFrame;
rpl::event_stream<crl::time> _nextFrameTimeUpdates;
rpl::event_stream<> _waitingForData; rpl::event_stream<> _waitingForData;
FrameRequest _request; FrameRequest _request;
@ -122,13 +121,12 @@ VideoTrackObject::VideoTrackObject(
Expects(_error != nullptr); Expects(_error != nullptr);
} }
rpl::producer<crl::time> VideoTrackObject::displayFrameAt() const { rpl::producer<> VideoTrackObject::checkNextFrame() const {
return interrupted() return interrupted()
? (rpl::complete<crl::time>() | rpl::type_erased()) ? (rpl::complete<>() | rpl::type_erased())
: (_nextFrameDisplayTime == kTimeUnknown) : !_shared->firstPresentHappened()
? (_nextFrameTimeUpdates.events() | rpl::type_erased()) ? (_checkNextFrame.events() | rpl::type_erased())
: _nextFrameTimeUpdates.events_starting_with_copy( : _checkNextFrame.events_starting_with({});
_nextFrameDisplayTime);
} }
rpl::producer<> VideoTrackObject::waitingForData() const { rpl::producer<> VideoTrackObject::waitingForData() const {
@ -200,10 +198,9 @@ auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
return result; return result;
} else if (!dropStaleFrames } else if (!dropStaleFrames
|| !VideoTrack::IsStale(frame, trackTime)) { || !VideoTrack::IsStale(frame, trackTime)) {
LOG(("READ FRAMES, TRACK TIME: %1").arg(trackTime));
return std::nullopt; return std::nullopt;
} else { } else {
LOG(("DROPPED FRAMES, TRACK TIME: %1").arg(trackTime)); LOG(("[%1] DROPPED FRAMES1, TRACK TIME: %2").arg(crl::now()).arg(trackTime));
} }
} }
}, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState { }, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState {
@ -240,7 +237,6 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
return FrameResult::Waiting; return FrameResult::Waiting;
} }
const auto position = currentFramePosition(); const auto position = currentFramePosition();
LOG(("GOT FRAME: %1 (queue %2)").arg(position).arg(_stream.queue.size()));
if (position == kTimeUnknown) { if (position == kTimeUnknown) {
interrupt(); interrupt();
_error(Error::InvalidData); _error(Error::InvalidData);
@ -253,7 +249,7 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
} }
void VideoTrackObject::presentFrameIfNeeded() { void VideoTrackObject::presentFrameIfNeeded() {
if (_pausedTime != kTimeUnknown) { if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
return; return;
} }
const auto time = trackTime(); const auto time = trackTime();
@ -278,22 +274,16 @@ void VideoTrackObject::presentFrameIfNeeded() {
Ensures(VideoTrack::IsRasterized(frame)); Ensures(VideoTrack::IsRasterized(frame));
}; };
const auto presented = _shared->presentFrame( const auto presented = _shared->presentFrame(
time.trackTime, time,
_options.speed,
_options.dropStaleFrames, _options.dropStaleFrames,
rasterize); rasterize);
if (presented.displayPosition == kFinishedPosition) { if (presented.displayPosition == kFinishedPosition) {
interrupt(); interrupt();
_nextFrameTimeUpdates = rpl::event_stream<crl::time>(); _checkNextFrame = rpl::event_stream<>();
return; return;
} else if (presented.displayPosition != kTimeUnknown) { } else if (presented.displayPosition != kTimeUnknown) {
const auto trackLeft = presented.displayPosition - time.trackTime; _checkNextFrame.fire({});
// We don't use rpl::variable, because we want an event each time
// we assign a new value, even if the value really didn't change.
_nextFrameDisplayTime = time.worldTime
+ crl::time(std::round(trackLeft / _options.speed));
LOG(("NOW: %1, FRAME POSITION: %2, TRACK TIME: %3, TRACK LEFT: %4, NEXT: %5").arg(time.worldTime).arg(presented.displayPosition).arg(time.trackTime).arg(trackLeft).arg(_nextFrameDisplayTime));
_nextFrameTimeUpdates.fire_copy(_nextFrameDisplayTime);
} }
if (presented.nextCheckDelay != kTimeUnknown) { if (presented.nextCheckDelay != kTimeUnknown) {
Assert(presented.nextCheckDelay >= 0); Assert(presented.nextCheckDelay >= 0);
@ -403,11 +393,6 @@ bool VideoTrackObject::processFirstFrame() {
crl::time VideoTrackObject::currentFramePosition() const { crl::time VideoTrackObject::currentFramePosition() const {
const auto position = FramePosition(_stream); const auto position = FramePosition(_stream);
LOG(("FRAME_POSITION: %1 (pts: %2, dts: %3, duration: %4)"
).arg(position
).arg(PtsToTime(_stream.frame->pts, _stream.timeBase)
).arg(PtsToTime(_stream.frame->pkt_dts, _stream.timeBase)
).arg(PtsToTime(_stream.frame->pkt_duration, _stream.timeBase)));
if (position == kTimeUnknown || position == kFinishedPosition) { if (position == kTimeUnknown || position == kFinishedPosition) {
return kTimeUnknown; return kTimeUnknown;
} }
@ -502,6 +487,13 @@ not_null<VideoTrack::Frame*> VideoTrack::Shared::getFrame(int index) {
return &_frames[index]; return &_frames[index];
} }
not_null<const VideoTrack::Frame*> VideoTrack::Shared::getFrame(
int index) const {
Expects(index >= 0 && index < kFramesCount);
return &_frames[index];
}
auto VideoTrack::Shared::prepareState( auto VideoTrack::Shared::prepareState(
crl::time trackTime, crl::time trackTime,
bool dropStaleFrames) bool dropStaleFrames)
@ -522,7 +514,7 @@ auto VideoTrack::Shared::prepareState(
} else if (IsStale(frame, trackTime)) { } else if (IsStale(frame, trackTime)) {
std::swap(*frame, *next); std::swap(*frame, *next);
next->displayed = kDisplaySkipped; next->displayed = kDisplaySkipped;
LOG(("DROPPED FRAMES, TRACK TIME: %1").arg(trackTime)); LOG(("[%1] DROPPED FRAMES2, TRACK TIME: %2").arg(crl::now()).arg(trackTime));
return next; return next;
} else { } else {
return PrepareNextCheck(frame->position - trackTime + 1); return PrepareNextCheck(frame->position - trackTime + 1);
@ -548,9 +540,20 @@ auto VideoTrack::Shared::prepareState(
Unexpected("Counter value in VideoTrack::Shared::prepareState."); Unexpected("Counter value in VideoTrack::Shared::prepareState.");
} }
// Sometimes main thread subscribes to check frame requests before
// the first frame is ready and presented and sometimes after.
bool VideoTrack::Shared::firstPresentHappened() const {
switch (counter()) {
case 0: return false;
case 1: return true;
}
Unexpected("Counter value in VideoTrack::Shared::firstPresentHappened.");
}
template <typename RasterizeCallback> template <typename RasterizeCallback>
auto VideoTrack::Shared::presentFrame( auto VideoTrack::Shared::presentFrame(
crl::time trackTime, TimePoint time,
float64 playbackSpeed,
bool dropStaleFrames, bool dropStaleFrames,
RasterizeCallback &&rasterize) RasterizeCallback &&rasterize)
-> PresentFrame { -> PresentFrame {
@ -565,6 +568,10 @@ auto VideoTrack::Shared::presentFrame(
// Error happened during frame prepare. // Error happened during frame prepare.
return { kTimeUnknown, kTimeUnknown }; return { kTimeUnknown, kTimeUnknown };
} }
const auto trackLeft = position - time.trackTime;
frame->display = time.worldTime
+ crl::time(std::round(trackLeft / playbackSpeed));
LOG(("[%1] SCHEDULE %5, FRAME POSITION: %2, TRACK TIME: %3, TRACK LEFT: %4").arg(time.worldTime).arg(position).arg(time.trackTime).arg(trackLeft).arg(frame->display));
// Release this frame to the main thread for rendering. // Release this frame to the main thread for rendering.
_counter.store( _counter.store(
@ -582,13 +589,12 @@ auto VideoTrack::Shared::presentFrame(
return { kTimeUnknown, crl::time(0) }; return { kTimeUnknown, crl::time(0) };
} else if (next->position == kFinishedPosition } else if (next->position == kFinishedPosition
|| !dropStaleFrames || !dropStaleFrames
|| IsStale(frame, trackTime)) { || IsStale(frame, time.trackTime)) {
return { kTimeUnknown, kTimeUnknown }; return { kTimeUnknown, kTimeUnknown };
} }
return { kTimeUnknown, (frame->position - trackTime + 1) }; return { kTimeUnknown, (frame->position - time.trackTime + 1) };
}; };
LOG(("PRESENT COUNTER: %1").arg(counter()));
switch (counter()) { switch (counter()) {
case 0: return present(0, 1); case 0: return present(0, 1);
case 1: return nextCheckDelay(2); case 1: return nextCheckDelay(2);
@ -602,6 +608,26 @@ auto VideoTrack::Shared::presentFrame(
Unexpected("Counter value in VideoTrack::Shared::prepareState."); Unexpected("Counter value in VideoTrack::Shared::prepareState.");
} }
crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
const auto frameDisplayTime = [&](int index) {
const auto frame = getFrame(index);
Assert(frame->displayed == kTimeUnknown);
return frame->display;
};
switch (counter()) {
case 0: return kTimeUnknown;
case 1: return frameDisplayTime(1);
case 2: return kTimeUnknown;
case 3: return frameDisplayTime(2);
case 4: return kTimeUnknown;
case 5: return frameDisplayTime(3);
case 6: return kTimeUnknown;
case 7: return frameDisplayTime(0);
}
Unexpected("Counter value in VideoTrack::Shared::nextFrameDisplayTime.");
}
crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) { crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
const auto markAndJump = [&](int counter, int index) { const auto markAndJump = [&](int counter, int index) {
const auto frame = getFrame(index); const auto frame = getFrame(index);
@ -614,7 +640,6 @@ crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
return frame->position; return frame->position;
}; };
switch (counter()) { switch (counter()) {
case 0: return kTimeUnknown; case 0: return kTimeUnknown;
case 1: return markAndJump(1, 1); case 1: return markAndJump(1, 1);
@ -695,6 +720,10 @@ void VideoTrack::setSpeed(float64 speed) {
}); });
} }
crl::time VideoTrack::nextFrameDisplayTime() const {
return _shared->nextFrameDisplayTime();
}
crl::time VideoTrack::markFrameDisplayed(crl::time now) { crl::time VideoTrack::markFrameDisplayed(crl::time now) {
const auto position = _shared->markFrameDisplayed(now); const auto position = _shared->markFrameDisplayed(now);
if (position != kTimeUnknown) { if (position != kTimeUnknown) {
@ -749,9 +778,9 @@ bool VideoTrack::IsStale(not_null<Frame*> frame, crl::time trackTime) {
return (frame->position < trackTime); return (frame->position < trackTime);
} }
rpl::producer<crl::time> VideoTrack::renderNextFrame() const { rpl::producer<> VideoTrack::checkNextFrame() const {
return _wrapped.producer_on_main([](const Implementation &unwrapped) { return _wrapped.producer_on_main([](const Implementation &unwrapped) {
return unwrapped.displayFrameAt(); return unwrapped.checkNextFrame();
}); });
} }

View File

@ -47,8 +47,9 @@ public:
// Called from the main thread. // Called from the main thread.
// Returns the position of the displayed frame. // Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now); [[nodiscard]] crl::time markFrameDisplayed(crl::time now);
[[nodiscard]] crl::time nextFrameDisplayTime() const;
[[nodiscard]] QImage frame(const FrameRequest &request); [[nodiscard]] QImage frame(const FrameRequest &request);
[[nodiscard]] rpl::producer<crl::time> renderNextFrame() const; [[nodiscard]] rpl::producer<> checkNextFrame() const;
[[nodiscard]] rpl::producer<> waitingForData() const; [[nodiscard]] rpl::producer<> waitingForData() const;
// Called from the main thread. // Called from the main thread.
@ -62,6 +63,7 @@ private:
QImage original; QImage original;
crl::time position = kTimeUnknown; crl::time position = kTimeUnknown;
crl::time displayed = kTimeUnknown; crl::time displayed = kTimeUnknown;
crl::time display = kTimeUnknown;
FrameRequest request; FrameRequest request;
QImage prepared; QImage prepared;
@ -90,17 +92,21 @@ private:
// RasterizeCallback(not_null<Frame*>). // RasterizeCallback(not_null<Frame*>).
template <typename RasterizeCallback> template <typename RasterizeCallback>
[[nodiscard]] PresentFrame presentFrame( [[nodiscard]] PresentFrame presentFrame(
crl::time trackTime, TimePoint trackTime,
float64 playbackSpeed,
bool dropStaleFrames, bool dropStaleFrames,
RasterizeCallback &&rasterize); RasterizeCallback &&rasterize);
[[nodiscard]] bool firstPresentHappened() const;
// Called from the main thread. // Called from the main thread.
// Returns the position of the displayed frame. // Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now); [[nodiscard]] crl::time markFrameDisplayed(crl::time now);
[[nodiscard]] crl::time nextFrameDisplayTime() const;
[[nodiscard]] not_null<Frame*> frameForPaint(); [[nodiscard]] not_null<Frame*> frameForPaint();
private: private:
[[nodiscard]] not_null<Frame*> getFrame(int index); [[nodiscard]] not_null<Frame*> getFrame(int index);
[[nodiscard]] not_null<const Frame*> getFrame(int index) const;
[[nodiscard]] int counter() const; [[nodiscard]] int counter() const;
static constexpr auto kCounterUninitialized = -1; static constexpr auto kCounterUninitialized = -1;

View File

@ -8,6 +8,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/effects/animations.h" #include "ui/effects/animations.h"
#include "core/application.h" #include "core/application.h"
#include "core/sandbox.h"
namespace Ui { namespace Ui {
namespace Animations { namespace Animations {
@ -32,6 +33,13 @@ void Basic::stop() {
} }
} }
Manager::Manager() {
Core::Sandbox::Instance().widgetUpdateRequests(
) | rpl::start_with_next([=] {
update();
}, _lifetime);
}
void Manager::start(not_null<Basic*> animation) { void Manager::start(not_null<Basic*> animation) {
if (_updating) { if (_updating) {
_starting.push_back(animation); _starting.push_back(animation);

View File

@ -93,6 +93,8 @@ private:
class Manager final : private QObject { class Manager final : private QObject {
public: public:
Manager();
void update(); void update();
private: private:
@ -113,6 +115,7 @@ private:
bool _scheduled = false; bool _scheduled = false;
std::vector<Basic*> _active; std::vector<Basic*> _active;
std::vector<not_null<Basic*>> _starting; std::vector<not_null<Basic*>> _starting;
rpl::lifetime _lifetime;
}; };

View File

@ -55,12 +55,9 @@ void ContinuousSlider::setValue(float64 value) {
void ContinuousSlider::setValue(float64 value, float64 receivedTill) { void ContinuousSlider::setValue(float64 value, float64 receivedTill) {
if (_value != value || _receivedTill != receivedTill) { if (_value != value || _receivedTill != receivedTill) {
LOG(("UPDATED"));
_value = value; _value = value;
_receivedTill = receivedTill; _receivedTill = receivedTill;
update(); update();
} else {
LOG(("SKIPPED"));
} }
} }