Optimized video frame pushing.

This commit is contained in:
John Preston 2019-03-07 17:23:19 +04:00
parent 5c4b459f57
commit a56a12a1ef
9 changed files with 143 additions and 54 deletions

View File

@ -500,8 +500,16 @@ bool Sandbox::notify(QObject *receiver, QEvent *e) {
const auto wrap = createEventNestingLevel();
const auto type = e->type();
if ((type == QEvent::UpdateRequest) && _application) {
_application->animationManager().update();
if (type == QEvent::UpdateRequest) {
_widgetUpdateRequests.fire({});
// Profiling.
//const auto time = crl::now();
//LOG(("[%1] UPDATE STARTED").arg(time));
//const auto guard = gsl::finally([&] {
// const auto now = crl::now();
// LOG(("[%1] UPDATE FINISHED (%2)").arg(now).arg(now - time));
//});
//return QApplication::notify(receiver, e);
}
return QApplication::notify(receiver, e);
}
@ -550,6 +558,10 @@ void Sandbox::resumeDelayedWindowActivations() {
_delayedActivationsPaused = false;
}
// Exposes a stream that fires once per QEvent::UpdateRequest received by
// Sandbox::notify, letting subscribers piggy-back on widget repaint ticks.
rpl::producer<> Sandbox::widgetUpdateRequests() const {
return _widgetUpdateRequests.events();
}
// Returns a copy of the proxy configuration stored on the sandbox.
ProxyData Sandbox::sandboxProxy() const {
return _sandboxProxy;
}

View File

@ -39,6 +39,8 @@ public:
void pauseDelayedWindowActivations();
void resumeDelayedWindowActivations();
rpl::producer<> widgetUpdateRequests() const;
ProxyData sandboxProxy() const;
static Sandbox &Instance() {
@ -109,6 +111,8 @@ private:
QByteArray _lastCrashDump;
ProxyData _sandboxProxy;
rpl::event_stream<> _widgetUpdateRequests;
};
} // namespace Core

View File

@ -13,6 +13,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_video_track.h"
#include "media/audio/media_audio.h" // for SupportsSpeedControl()
#include "data/data_document.h" // for DocumentData::duration()
#include "core/sandbox.h" // for widgetUpdateRequests() producer
namespace Media {
namespace Streaming {
@ -81,25 +82,38 @@ Player::Player(
std::unique_ptr<Loader> loader)
: _file(std::make_unique<File>(owner, std::move(loader)))
, _remoteLoader(_file->isRemoteLoader())
, _renderFrameTimer([=] { checkNextFrame(); }) {
, _renderFrameTimer([=] { checkNextFrameRender(); }) {
}
not_null<FileDelegate*> Player::delegate() {
return static_cast<FileDelegate*>(this);
}
void Player::checkNextFrame() {
void Player::checkNextFrameRender() {
Expects(_nextFrameTime != kTimeUnknown);
Expects(!_renderFrameTimer.isActive());
const auto now = crl::now();
if (now < _nextFrameTime) {
_renderFrameTimer.callOnce(_nextFrameTime - now);
if (!_renderFrameTimer.isActive()) {
_renderFrameTimer.callOnce(_nextFrameTime - now);
}
} else {
_renderFrameTimer.cancel();
_nextFrameTime = kTimeUnknown;
renderFrame(now);
}
}
// Asks the video track when its next frame should be displayed; if a time
// is known, caches it in _nextFrameTime and re-enters checkVideoStep(),
// which will now take the render branch.
void Player::checkNextFrameAvailability() {
Expects(_video != nullptr);
_nextFrameTime = _video->nextFrameDisplayTime();
if (_nextFrameTime != kTimeUnknown) {
// Profiling trace left in by this commit; pairs with the other
// "[time] ..." LOG lines added here.
LOG(("[%2] RENDERING AT: %1").arg(_nextFrameTime).arg(crl::now()));
checkVideoStep();
}
}
void Player::renderFrame(crl::time now) {
Expects(_video != nullptr);
@ -454,6 +468,7 @@ void Player::updatePausedState() {
if (!_paused && _stage == Stage::Ready) {
const auto guard = base::make_weak(&_sessionGuard);
start();
LOG(("[%1] STARTED.").arg(crl::now()));
if (!guard) {
return;
}
@ -548,19 +563,24 @@ void Player::start() {
}
if (guard && _video) {
_video->renderNextFrame(
) | rpl::start_with_next_done([=](crl::time when) {
_nextFrameTime = when;
LOG(("RENDERING AT: %1").arg(when));
checkNextFrame();
_video->checkNextFrame(
) | rpl::start_with_next_done([=] {
checkVideoStep();
}, [=] {
Expects(_stage == Stage::Started);
Assert(_stage == Stage::Started);
_videoFinished = true;
if (!_audio || _audioFinished) {
_updates.fire({ Finished() });
}
}, _sessionLifetime);
Core::Sandbox::Instance().widgetUpdateRequests(
) | rpl::filter([=] {
return !_videoFinished;
}) | rpl::start_with_next([=] {
checkVideoStep();
}, _sessionLifetime);
}
if (guard && _audio) {
trackSendReceivedTill(*_audio, _information.audio.state);
@ -570,6 +590,14 @@ void Player::start() {
}
}
// Single entry point for advancing video playback: if a display time is
// already scheduled, go check whether it is due to render; otherwise ask
// the track for the next frame's display time first.
void Player::checkVideoStep() {
if (_nextFrameTime != kTimeUnknown) {
checkNextFrameRender();
} else {
checkNextFrameAvailability();
}
}
void Player::stop() {
_file->stop();
_sessionLifetime = rpl::lifetime();

View File

@ -91,7 +91,9 @@ private:
void start();
void provideStartInformation();
void fail(Error error);
void checkNextFrame();
void checkVideoStep();
void checkNextFrameRender();
void checkNextFrameAvailability();
void renderFrame(crl::time now);
void audioReceivedTill(crl::time position);
void audioPlayedTill(crl::time position);

View File

@ -36,7 +36,7 @@ public:
void process(Packet &&packet);
[[nodisacrd]] rpl::producer<crl::time> displayFrameAt() const;
[[nodisacrd]] rpl::producer<> checkNextFrame() const;
[[nodisacrd]] rpl::producer<> waitingForData() const;
void pause(crl::time time);
@ -91,8 +91,7 @@ private:
crl::time _resumedTime = kTimeUnknown;
mutable TimePoint _syncTimePoint;
crl::time _framePositionShift = 0;
crl::time _nextFrameDisplayTime = kTimeUnknown;
rpl::event_stream<crl::time> _nextFrameTimeUpdates;
rpl::event_stream<> _checkNextFrame;
rpl::event_stream<> _waitingForData;
FrameRequest _request;
@ -122,13 +121,12 @@ VideoTrackObject::VideoTrackObject(
Expects(_error != nullptr);
}
rpl::producer<crl::time> VideoTrackObject::displayFrameAt() const {
rpl::producer<> VideoTrackObject::checkNextFrame() const {
return interrupted()
? (rpl::complete<crl::time>() | rpl::type_erased())
: (_nextFrameDisplayTime == kTimeUnknown)
? (_nextFrameTimeUpdates.events() | rpl::type_erased())
: _nextFrameTimeUpdates.events_starting_with_copy(
_nextFrameDisplayTime);
? (rpl::complete<>() | rpl::type_erased())
: !_shared->firstPresentHappened()
? (_checkNextFrame.events() | rpl::type_erased())
: _checkNextFrame.events_starting_with({});
}
rpl::producer<> VideoTrackObject::waitingForData() const {
@ -200,10 +198,9 @@ auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
return result;
} else if (!dropStaleFrames
|| !VideoTrack::IsStale(frame, trackTime)) {
LOG(("READ FRAMES, TRACK TIME: %1").arg(trackTime));
return std::nullopt;
} else {
LOG(("DROPPED FRAMES, TRACK TIME: %1").arg(trackTime));
LOG(("[%1] DROPPED FRAMES1, TRACK TIME: %2").arg(crl::now()).arg(trackTime));
}
}
}, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState {
@ -240,7 +237,6 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
return FrameResult::Waiting;
}
const auto position = currentFramePosition();
LOG(("GOT FRAME: %1 (queue %2)").arg(position).arg(_stream.queue.size()));
if (position == kTimeUnknown) {
interrupt();
_error(Error::InvalidData);
@ -253,7 +249,7 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
}
void VideoTrackObject::presentFrameIfNeeded() {
if (_pausedTime != kTimeUnknown) {
if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
return;
}
const auto time = trackTime();
@ -278,22 +274,16 @@ void VideoTrackObject::presentFrameIfNeeded() {
Ensures(VideoTrack::IsRasterized(frame));
};
const auto presented = _shared->presentFrame(
time.trackTime,
time,
_options.speed,
_options.dropStaleFrames,
rasterize);
if (presented.displayPosition == kFinishedPosition) {
interrupt();
_nextFrameTimeUpdates = rpl::event_stream<crl::time>();
_checkNextFrame = rpl::event_stream<>();
return;
} else if (presented.displayPosition != kTimeUnknown) {
const auto trackLeft = presented.displayPosition - time.trackTime;
// We don't use rpl::variable, because we want an event each time
// we assign a new value, even if the value really didn't change.
_nextFrameDisplayTime = time.worldTime
+ crl::time(std::round(trackLeft / _options.speed));
LOG(("NOW: %1, FRAME POSITION: %2, TRACK TIME: %3, TRACK LEFT: %4, NEXT: %5").arg(time.worldTime).arg(presented.displayPosition).arg(time.trackTime).arg(trackLeft).arg(_nextFrameDisplayTime));
_nextFrameTimeUpdates.fire_copy(_nextFrameDisplayTime);
_checkNextFrame.fire({});
}
if (presented.nextCheckDelay != kTimeUnknown) {
Assert(presented.nextCheckDelay >= 0);
@ -403,11 +393,6 @@ bool VideoTrackObject::processFirstFrame() {
crl::time VideoTrackObject::currentFramePosition() const {
const auto position = FramePosition(_stream);
LOG(("FRAME_POSITION: %1 (pts: %2, dts: %3, duration: %4)"
).arg(position
).arg(PtsToTime(_stream.frame->pts, _stream.timeBase)
).arg(PtsToTime(_stream.frame->pkt_dts, _stream.timeBase)
).arg(PtsToTime(_stream.frame->pkt_duration, _stream.timeBase)));
if (position == kTimeUnknown || position == kFinishedPosition) {
return kTimeUnknown;
}
@ -502,6 +487,13 @@ not_null<VideoTrack::Frame*> VideoTrack::Shared::getFrame(int index) {
return &_frames[index];
}
// Const overload of getFrame(): bounds-checked read-only access to one of
// the kFramesCount frame slots (needed by the new const
// nextFrameDisplayTime() accessor).
not_null<const VideoTrack::Frame*> VideoTrack::Shared::getFrame(
int index) const {
Expects(index >= 0 && index < kFramesCount);
return &_frames[index];
}
auto VideoTrack::Shared::prepareState(
crl::time trackTime,
bool dropStaleFrames)
@ -522,7 +514,7 @@ auto VideoTrack::Shared::prepareState(
} else if (IsStale(frame, trackTime)) {
std::swap(*frame, *next);
next->displayed = kDisplaySkipped;
LOG(("DROPPED FRAMES, TRACK TIME: %1").arg(trackTime));
LOG(("[%1] DROPPED FRAMES2, TRACK TIME: %2").arg(crl::now()).arg(trackTime));
return next;
} else {
return PrepareNextCheck(frame->position - trackTime + 1);
@ -548,9 +540,20 @@ auto VideoTrack::Shared::prepareState(
Unexpected("Counter value in VideoTrack::Shared::prepareState.");
}
// Sometimes main thread subscribes to check frame requests before
// the first frame is ready and presented and sometimes after.
// Reports whether the first frame has already been presented: counter
// state 0 means not yet, state 1 means the first present happened. Any
// other counter state is unexpected here — presumably this is only called
// around subscription time, before the state machine advances further;
// NOTE(review): confirm against the counter lifecycle.
bool VideoTrack::Shared::firstPresentHappened() const {
switch (counter()) {
case 0: return false;
case 1: return true;
}
Unexpected("Counter value in VideoTrack::Shared::firstPresentHappened.");
}
template <typename RasterizeCallback>
auto VideoTrack::Shared::presentFrame(
crl::time trackTime,
TimePoint time,
float64 playbackSpeed,
bool dropStaleFrames,
RasterizeCallback &&rasterize)
-> PresentFrame {
@ -565,6 +568,10 @@ auto VideoTrack::Shared::presentFrame(
// Error happened during frame prepare.
return { kTimeUnknown, kTimeUnknown };
}
const auto trackLeft = position - time.trackTime;
frame->display = time.worldTime
+ crl::time(std::round(trackLeft / playbackSpeed));
LOG(("[%1] SCHEDULE %5, FRAME POSITION: %2, TRACK TIME: %3, TRACK LEFT: %4").arg(time.worldTime).arg(position).arg(time.trackTime).arg(trackLeft).arg(frame->display));
// Release this frame to the main thread for rendering.
_counter.store(
@ -582,13 +589,12 @@ auto VideoTrack::Shared::presentFrame(
return { kTimeUnknown, crl::time(0) };
} else if (next->position == kFinishedPosition
|| !dropStaleFrames
|| IsStale(frame, trackTime)) {
|| IsStale(frame, time.trackTime)) {
return { kTimeUnknown, kTimeUnknown };
}
return { kTimeUnknown, (frame->position - trackTime + 1) };
return { kTimeUnknown, (frame->position - time.trackTime + 1) };
};
LOG(("PRESENT COUNTER: %1").arg(counter()));
switch (counter()) {
case 0: return present(0, 1);
case 1: return nextCheckDelay(2);
@ -602,6 +608,26 @@ auto VideoTrack::Shared::presentFrame(
Unexpected("Counter value in VideoTrack::Shared::prepareState.");
}
// Returns the scheduled display time of the frame currently released to
// the main thread, or kTimeUnknown when no frame is pending. Odd counter
// states map to a specific frame slot (1->1, 3->2, 5->3, 7->0); even
// states mean no frame is ready for display — presumably mirroring the
// counter/slot convention used by presentFrame(); TODO confirm.
crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
const auto frameDisplayTime = [&](int index) {
const auto frame = getFrame(index);
// A pending frame must not have been marked displayed yet.
Assert(frame->displayed == kTimeUnknown);
return frame->display;
};
switch (counter()) {
case 0: return kTimeUnknown;
case 1: return frameDisplayTime(1);
case 2: return kTimeUnknown;
case 3: return frameDisplayTime(2);
case 4: return kTimeUnknown;
case 5: return frameDisplayTime(3);
case 6: return kTimeUnknown;
case 7: return frameDisplayTime(0);
}
Unexpected("Counter value in VideoTrack::Shared::nextFrameDisplayTime.");
}
crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
const auto markAndJump = [&](int counter, int index) {
const auto frame = getFrame(index);
@ -614,7 +640,6 @@ crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
return frame->position;
};
switch (counter()) {
case 0: return kTimeUnknown;
case 1: return markAndJump(1, 1);
@ -695,6 +720,10 @@ void VideoTrack::setSpeed(float64 speed) {
});
}
// Main-thread accessor: forwards to the shared state's computation of the
// pending frame's display time (kTimeUnknown if none is pending).
crl::time VideoTrack::nextFrameDisplayTime() const {
return _shared->nextFrameDisplayTime();
}
crl::time VideoTrack::markFrameDisplayed(crl::time now) {
const auto position = _shared->markFrameDisplayed(now);
if (position != kTimeUnknown) {
@ -749,9 +778,9 @@ bool VideoTrack::IsStale(not_null<Frame*> frame, crl::time trackTime) {
return (frame->position < trackTime);
}
rpl::producer<crl::time> VideoTrack::renderNextFrame() const {
rpl::producer<> VideoTrack::checkNextFrame() const {
return _wrapped.producer_on_main([](const Implementation &unwrapped) {
return unwrapped.displayFrameAt();
return unwrapped.checkNextFrame();
});
}

View File

@ -47,8 +47,9 @@ public:
// Called from the main thread.
// Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
[[nodiscard]] crl::time nextFrameDisplayTime() const;
[[nodiscard]] QImage frame(const FrameRequest &request);
[[nodiscard]] rpl::producer<crl::time> renderNextFrame() const;
[[nodiscard]] rpl::producer<> checkNextFrame() const;
[[nodiscard]] rpl::producer<> waitingForData() const;
// Called from the main thread.
@ -62,6 +63,7 @@ private:
QImage original;
crl::time position = kTimeUnknown;
crl::time displayed = kTimeUnknown;
crl::time display = kTimeUnknown;
FrameRequest request;
QImage prepared;
@ -90,17 +92,21 @@ private:
// RasterizeCallback(not_null<Frame*>).
template <typename RasterizeCallback>
[[nodiscard]] PresentFrame presentFrame(
crl::time trackTime,
TimePoint trackTime,
float64 playbackSpeed,
bool dropStaleFrames,
RasterizeCallback &&rasterize);
[[nodiscard]] bool firstPresentHappened() const;
// Called from the main thread.
// Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
[[nodiscard]] crl::time nextFrameDisplayTime() const;
[[nodiscard]] not_null<Frame*> frameForPaint();
private:
[[nodiscard]] not_null<Frame*> getFrame(int index);
[[nodiscard]] not_null<const Frame*> getFrame(int index) const;
[[nodiscard]] int counter() const;
static constexpr auto kCounterUninitialized = -1;

View File

@ -8,6 +8,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/effects/animations.h"
#include "core/application.h"
#include "core/sandbox.h"
namespace Ui {
namespace Animations {
@ -32,6 +33,13 @@ void Basic::stop() {
}
}
// Drives animations off widget repaint ticks: subscribes to the sandbox's
// widgetUpdateRequests() stream and calls update() on every event, for the
// lifetime of _lifetime.
Manager::Manager() {
Core::Sandbox::Instance().widgetUpdateRequests(
) | rpl::start_with_next([=] {
update();
}, _lifetime);
}
void Manager::start(not_null<Basic*> animation) {
if (_updating) {
_starting.push_back(animation);

View File

@ -93,6 +93,8 @@ private:
class Manager final : private QObject {
public:
Manager();
void update();
private:
@ -113,6 +115,7 @@ private:
bool _scheduled = false;
std::vector<Basic*> _active;
std::vector<not_null<Basic*>> _starting;
rpl::lifetime _lifetime;
};

View File

@ -55,12 +55,9 @@ void ContinuousSlider::setValue(float64 value) {
// Updates the slider's value and buffered-till position, repainting only
// when either actually changed. The LOG lines are temporary profiling
// traces (removed by this commit).
void ContinuousSlider::setValue(float64 value, float64 receivedTill) {
if (_value != value || _receivedTill != receivedTill) {
LOG(("UPDATED"));
_value = value;
_receivedTill = receivedTill;
update();
} else {
LOG(("SKIPPED"));
}
}