mirror of https://github.com/procxx/kepka.git
Implement pause / resume in streaming.
parent 3b369fc98e
commit a7d9281768
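
A rough sketch of how the pause / resume API added by this commit is meant to be used, based on the test code in the diff below; Player, PlaybackOptions, play(), pause(), resume() and paused() come from the commit itself, while the surrounding wiring is only illustrative:

	using namespace Media::Streaming;

	// Assumed setup: a Player constructed elsewhere, as in StartStreaming() below.
	auto options = PlaybackOptions();
	options.speed = 1.7; // Player::play() expects 0.5 .. 2.0.
	player->play(options); // replaces the old init() + start() pair

	// Toggling, as bound to the space key in the test panel:
	if (player->paused()) {
		player->resume(); // starts a Ready player, or resumes a Started one
	} else {
		player->pause(); // records the pause time and pauses audio/video tracks
	}
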
@@ -294,6 +294,8 @@ void StartStreaming(
	using namespace Media::Streaming;
	if (auto loader = document->createStreamingLoader(origin)) {
		static auto player = std::unique_ptr<Player>();

		class Panel
#if defined Q_OS_MAC && !defined OS_MAC_OLD
			: public Ui::RpWidgetWrap<QOpenGLWidget> {

@@ -310,10 +312,18 @@ void StartStreaming(
		protected:
			void paintEvent(QPaintEvent *e) override {
			}
			void keyPressEvent(QKeyEvent *e) override {
				if (e->key() == Qt::Key_Space) {
					if (player->paused()) {
						player->resume();
					} else {
						player->pause();
					}
				}
			}

		};

		static auto player = std::unique_ptr<Player>();
		static auto video = base::unique_qptr<Panel>();
		player = std::make_unique<Player>(
			&document->owner(),

@@ -325,10 +335,10 @@ void StartStreaming(
		});

		auto options = Media::Streaming::PlaybackOptions();
		options.speed = 1.;
		options.speed = 1.7;
		//options.syncVideoByAudio = false;
		//options.position = (document->duration() / 2) * crl::time(1000);
		player->init(options);
		options.position = (document->duration() / 2) * crl::time(1000);
		player->play(options);
		player->updates(
		) | rpl::start_with_next_error_done([=](Update &&update) {
			update.data.match([&](Information &update) {

@@ -356,7 +366,6 @@ void StartStreaming(
				}
			}, video->lifetime());
		}
		player->start();
	}, [&](PreloadedVideo &update) {
	}, [&](UpdateVideo &update) {
		Expects(video != nullptr);

@@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_child_ffmpeg_loader.h"
#include "media/audio/media_audio_loaders.h"
#include "media/audio/media_audio_track.h"
#include "media/streaming/media_streaming_utility.h"
#include "data/data_document.h"
#include "data/data_file_origin.h"
#include "platform/platform_audio.h"

@@ -374,10 +375,10 @@ void Mixer::Track::destroyStream() {
	for (auto i = 0; i != 3; ++i) {
		stream.buffers[i] = 0;
	}
	destroySpeedEffect();
	resetSpeedEffect();
}

void Mixer::Track::destroySpeedEffect() {
void Mixer::Track::resetSpeedEffect() {
	if (!speedEffect) {
		return;
	} else if (alIsEffect(speedEffect->effect)) {

@@ -385,7 +386,7 @@ void Mixer::Track::destroySpeedEffect() {
		alDeleteAuxiliaryEffectSlots(1, &speedEffect->effectSlot);
		alDeleteFilters(1, &speedEffect->filter);
	}
	speedEffect = nullptr;
	speedEffect->effect = speedEffect->effectSlot = speedEffect->filter = 0;
}

void Mixer::Track::reattach(AudioMsgId::Type type) {

@@ -418,7 +419,6 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
void Mixer::Track::detach() {
	resetStream();
	destroyStream();
	destroySpeedEffect();
}

void Mixer::Track::clear() {

@@ -519,11 +519,13 @@ int Mixer::Track::getNotQueuedBufferIndex() {
}

void Mixer::Track::setVideoData(std::unique_ptr<VideoSoundData> data) {
	destroySpeedEffect();
	resetSpeedEffect();
	if (data && data->speed != 1.) {
		speedEffect = std::make_unique<SpeedEffect>();
		speedEffect->speed = data->speed;
		speedEffect->coarseTune = CoarseTuneForSpeed(data->speed);
	} else {
		speedEffect = nullptr;
	}
	videoData = std::move(data);
}

@@ -787,6 +789,7 @@ void Mixer::play(
	if (videoData) {
		current->setVideoData(std::move(videoData));
	} else {
		current->setVideoData(nullptr);
		current->file = audio.audio()->location(true);
		current->data = audio.audio()->data();
		notLoadedYet = (current->file.isEmpty() && current->data.isEmpty());

@@ -828,19 +831,19 @@ void Mixer::forceToBufferVideo(const AudioMsgId &audioId) {
	_loader->forceToBufferVideo(audioId);
}

Mixer::TimeCorrection Mixer::getVideoTimeCorrection(
Streaming::TimeCorrection Mixer::getVideoTimeCorrection(
		const AudioMsgId &audio) const {
	Expects(audio.type() == AudioMsgId::Type::Video);
	Expects(audio.playId() != 0);

	auto result = TimeCorrection();
	auto result = Streaming::TimeCorrection();
	const auto playId = audio.playId();

	QMutexLocker lock(&AudioMutex);
	const auto track = trackForType(AudioMsgId::Type::Video);
	if (track->state.id.playId() == playId && track->lastUpdateWhen > 0) {
		result.audioPositionValue = track->lastUpdatePosition;
		result.audioPositionTime = track->lastUpdateWhen;
		result.trackTime = track->lastUpdatePosition;
		result.worldTime = track->lastUpdateWhen;
	}
	return result;
}

@@ -7,7 +7,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once

#include "media/streaming/media_streaming_common.h"
#include "storage/localimageloader.h"
#include "base/bytes.h"

@@ -15,6 +14,10 @@ struct VideoSoundData;
struct VideoSoundPart;

namespace Media {
namespace Streaming {
struct TimeCorrection;
} // namespace Streaming

namespace Audio {

class Instance;

@@ -123,15 +126,8 @@ public:
	// Video player audio stream interface.
	void feedFromVideo(const VideoSoundPart &part);
	void forceToBufferVideo(const AudioMsgId &audioId);
	struct TimeCorrection {
		crl::time audioPositionValue = kTimeUnknown;
		crl::time audioPositionTime = kTimeUnknown;

		explicit operator bool() const {
			return (audioPositionValue != kTimeUnknown);
		}
	};
	TimeCorrection getVideoTimeCorrection(const AudioMsgId &audio) const;
	Streaming::TimeCorrection getVideoTimeCorrection(
		const AudioMsgId &audio) const;
	crl::time getVideoCorrectedTime(
		const AudioMsgId &id,
		crl::time frameMs,

@@ -244,7 +240,7 @@ private:
	private:
		void createStream(AudioMsgId::Type type);
		void destroyStream();
		void destroySpeedEffect();
		void resetSpeedEffect();
		void resetStream();

	};

@@ -125,10 +125,15 @@ void AudioTrack::mixerForceToBuffer() {
	Media::Player::mixer()->forceToBufferVideo(_audioId);
}

void AudioTrack::start(crl::time startTime) {
void AudioTrack::pause(crl::time time) {
	Expects(initialized());

	Media::Player::mixer()->pause(_audioId, true);
}

void AudioTrack::resume(crl::time time) {
	Expects(initialized());

	// #TODO streaming support start() when paused.
	Media::Player::mixer()->resume(_audioId, true);
}

@@ -26,7 +26,8 @@ public:

	// Called from the main thread.
	// Must be called after 'ready' was invoked.
	void start(crl::time startTime);
	void pause(crl::time time);
	void resume(crl::time time);

	// Called from the main thread.
	// Non-const, because we subscribe to changes on the first call.

@@ -71,37 +71,6 @@ not_null<FileDelegate*> Player::delegate() {
	return static_cast<FileDelegate*>(this);
}

void Player::start() {
	Expects(_stage == Stage::Ready);

	_stage = Stage::Started;
	if (_audio) {
		_audio->playPosition(
		) | rpl::start_with_next_done([=](crl::time position) {
			audioPlayedTill(position);
		}, [=] {
			// audio finished
		}, _lifetime);
	}
	if (_video) {
		_video->renderNextFrame(
		) | rpl::start_with_next_done([=](crl::time when) {
			_nextFrameTime = when;
			checkNextFrame();
		}, [=] {
			// video finished
		}, _lifetime);
	}

	_startedTime = crl::now();
	if (_audio) {
		_audio->start(_startedTime);
	}
	if (_video) {
		_video->start(_startedTime);
	}
}

void Player::checkNextFrame() {
	Expects(_nextFrameTime != kTimeUnknown);

@@ -238,9 +207,6 @@ void Player::fileWaitingForData() {
		return;
	}
	_waitingForData = true;
	crl::on_main(&_sessionGuard, [=] {
		_updates.fire({ WaitingForData() });
	});
	if (_audio) {
		_audio->waitForData();
	}

@@ -328,6 +294,11 @@ void Player::provideStartInformation() {
		_information.video.cover = QImage();

		_updates.fire(Update{ std::move(copy) });

		if (_stage == Stage::Ready && !_paused) {
			_paused = true;
			resume();
		}
	}
}

@@ -338,7 +309,7 @@ void Player::fail() {
	stopGuarded();
}

void Player::init(const PlaybackOptions &options) {
void Player::play(const PlaybackOptions &options) {
	Expects(options.speed >= 0.5 && options.speed <= 2.);

	stop();

@@ -349,15 +320,67 @@ void Player::init(const PlaybackOptions &options) {
}

void Player::pause() {
	Expects(_stage != Stage::Uninitialized && _stage != Stage::Failed);

	if (_paused) {
		return;
	}
	_paused = true;
	// #TODO streaming pause
	if (_stage == Stage::Started) {
		_pausedTime = crl::now();
		if (_audio) {
			_audio->pause(_pausedTime);
		}
		if (_video) {
			_video->pause(_pausedTime);
		}
	}
}

void Player::resume() {
	Expects(_stage != Stage::Uninitialized && _stage != Stage::Failed);

	if (!_paused) {
		return;
	}
	_paused = false;
	// #TODO streaming pause
	if (_stage == Stage::Ready) {
		start();
	}
	if (_stage == Stage::Started) {
		_startedTime = crl::now();
		if (_audio) {
			_audio->resume(_startedTime);
		}
		if (_video) {
			_video->resume(_startedTime);
		}
	}
}

void Player::start() {
	Expects(_stage == Stage::Ready);

	_stage = Stage::Started;
	if (_audio) {
		_audio->playPosition(
		) | rpl::start_with_next_done([=](crl::time position) {
			audioPlayedTill(position);
		}, [=] {
			// audio finished
		}, _lifetime);
	}
	if (_video) {
		_video->renderNextFrame(
		) | rpl::start_with_next_done([=](crl::time when) {
			_nextFrameTime = when;
			checkNextFrame();
		}, [=] {
			// video finished
		}, _lifetime);
	}

}
void Player::stop() {
	_file->stop();
	_audio = nullptr;

@@ -33,8 +33,7 @@ public:
	Player(const Player &other) = delete;
	Player &operator=(const Player &other) = delete;

	void init(const PlaybackOptions &options);
	void start();
	void play(const PlaybackOptions &options);
	void pause();
	void resume();
	void stop();

@@ -76,6 +75,7 @@ private:
	// Called from the main thread.
	void streamReady(Information &&information);
	void streamFailed();
	void start();
	void provideStartInformation();
	void fail();
	void checkNextFrame();

@@ -118,6 +118,7 @@ private:
	bool _paused = false;

	crl::time _startedTime = kTimeUnknown;
	crl::time _pausedTime = kTimeUnknown;
	crl::time _nextFrameTime = kTimeUnknown;
	base::Timer _renderFrameTimer;
	rpl::event_stream<Update, Error> _updates;

@@ -20,6 +20,18 @@ namespace Streaming {

constexpr auto kUniversalTimeBase = AVRational{ 1, AV_TIME_BASE };

struct TimeCorrection {
	crl::time trackTime = kTimeUnknown;
	crl::time worldTime = kTimeUnknown;

	bool valid() const {
		return (trackTime != kTimeUnknown) && (worldTime != kTimeUnknown);
	}
	explicit operator bool() const {
		return valid();
	}
};

class AvErrorWrap {
public:
	AvErrorWrap(int code = 0) : _code(code) {

@@ -37,7 +37,8 @@ public:

	[[nodisacrd]] rpl::producer<crl::time> displayFrameAt() const;

	void start(crl::time startTime);
	void pause(crl::time time);
	void resume(crl::time time);
	void interrupt();
	void frameDisplayed();

@@ -72,8 +73,9 @@ private:
	bool _noMoreData = false;
	FnMut<void(const Information &)> _ready;
	Fn<void()> _error;
	crl::time _startedTime = kTimeUnknown;
	crl::time _startedPosition = kTimeUnknown;
	crl::time _pausedTime = kTimeUnknown;
	crl::time _resumedTime = kTimeUnknown;
	mutable TimeCorrection _timeCorrection;
	mutable crl::time _previousFramePosition = kTimeUnknown;
	rpl::variable<crl::time> _nextFrameDisplayTime = kTimeUnknown;

@@ -179,6 +181,9 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
}

void VideoTrackObject::presentFrameIfNeeded() {
	if (_pausedTime != kTimeUnknown) {
		return;
	}
	const auto time = trackTime();
	const auto presented = _shared->presentFrame(time.trackNow);
	if (presented.displayPosition != kTimeUnknown) {

@@ -189,9 +194,29 @@ void VideoTrackObject::presentFrameIfNeeded() {
	queueReadFrames(presented.nextCheckDelay);
}

void VideoTrackObject::start(crl::time startTime) {
	_startedTime = startTime;
void VideoTrackObject::pause(crl::time time) {
	Expects(_timeCorrection.valid());

	if (_pausedTime == kTimeUnknown) {
		_pausedTime = time;
	}
}

void VideoTrackObject::resume(crl::time time) {
	Expects(_timeCorrection.trackTime != kTimeUnknown);

	// Resumed time used to validate sync to audio.
	_resumedTime = time;
	if (_pausedTime != kTimeUnknown) {
		Assert(_pausedTime <= time);
		_timeCorrection.worldTime += (time - _pausedTime);
		_pausedTime = kTimeUnknown;
	} else {
		_timeCorrection.worldTime = time;
	}
	queueReadFrames();

	Ensures(_timeCorrection.valid());
}
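
The resume() logic above keeps the video clock continuous across a pause by shifting the correction's worldTime forward by the paused duration. A hypothetical timeline (illustrative numbers, not taken from the commit):

	// Assume the track started with trackTime = 0 at worldTime = 10'000 ms.
	// pause(12'000):  _pausedTime = 12'000; frames stop being presented.
	// resume(15'000): worldTime += (15'000 - 12'000)  ->  worldTime = 13'000.
	//
	// trackTime() (see below) then extrapolates:
	//   trackNow = trackTime + (now - worldTime) * speed
	// so immediately after resume (now == 15'000, speed == 1.0):
	//   trackNow = 0 + (15'000 - 13'000) = 2'000 ms,
	// i.e. exactly the two seconds of video that had played before the pause.
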

bool VideoTrackObject::interrupted() const {

@@ -221,7 +246,7 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
	if (frame.isNull()) {
		return false;
	}
	_shared->init(std::move(frame), _startedPosition);
	_shared->init(std::move(frame), _timeCorrection.trackTime);
	callReady();
	if (!_stream.queue.empty()) {
		queueReadFrames();

@@ -242,9 +267,14 @@ crl::time VideoTrackObject::currentFramePosition() const {
}

bool VideoTrackObject::fillStateFromFrame() {
	_startedPosition = currentFramePosition();
	_nextFrameDisplayTime = _startedTime;
	return (_startedPosition != kTimeUnknown);
	Expects(_timeCorrection.trackTime == kTimeUnknown);

	const auto position = currentFramePosition();
	if (position == kTimeUnknown) {
		return false;
	}
	_nextFrameDisplayTime = _timeCorrection.trackTime = position;
	return true;
}

void VideoTrackObject::callReady() {

@@ -261,36 +291,30 @@ void VideoTrackObject::callReady() {
	data.cover = frame->original;
	data.rotation = _stream.rotation;
	data.state.duration = _stream.duration;
	data.state.position = _startedPosition;
	data.state.position = _timeCorrection.trackTime;
	data.state.receivedTill = _noMoreData
		? _stream.duration
		: _startedPosition;
		: _timeCorrection.trackTime;
	base::take(_ready)({ data });
}

VideoTrackObject::TrackTime VideoTrackObject::trackTime() const {
	auto result = TrackTime();
	const auto started = (_startedTime != kTimeUnknown);
	if (!started) {
		result.worldNow = crl::now();
		result.trackNow = _startedPosition;
	result.worldNow = crl::now();
	if (!_timeCorrection) {
		result.trackNow = _timeCorrection.trackTime;
		return result;
	}

	const auto correction = (_options.syncVideoByAudio && _audioId.playId())
		? Media::Player::mixer()->getVideoTimeCorrection(_audioId)
		: Media::Player::Mixer::TimeCorrection();
	const auto knownValue = correction
		? correction.audioPositionValue
		: _startedPosition;
	const auto knownTime = correction
		? correction.audioPositionTime
		: _startedTime;
	const auto worldNow = crl::now();
	const auto sinceKnown = (worldNow - knownTime);

	result.worldNow = worldNow;
	result.trackNow = knownValue
	if (_options.syncVideoByAudio && _audioId.playId()) {
		const auto mixer = Media::Player::mixer();
		const auto correction = mixer->getVideoTimeCorrection(_audioId);
		if (correction && correction.worldTime > _resumedTime) {
			_timeCorrection = correction;
		}
	}
	const auto sinceKnown = (result.worldNow - _timeCorrection.worldTime);
	result.trackNow = _timeCorrection.trackTime
		+ crl::time(std::round(sinceKnown * _options.speed));
	return result;
}
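
The rewritten trackTime() above extrapolates the playback position from the last known (trackTime, worldTime) pair, optionally refreshed from the audio mixer. A short worked example of the same formula, with illustrative numbers that are not taken from the commit:

	// Extrapolation performed by trackTime():
	//   trackNow = _timeCorrection.trackTime
	//            + round((crl::now() - _timeCorrection.worldTime) * _options.speed);
	//
	// With speed = 1.7, trackTime = 4'000 ms, worldTime = 20'000 ms, now = 21'000 ms:
	//   trackNow = 4'000 + round(1'000 * 1.7) = 5'700 ms.
	//
	// An audio correction is only accepted when correction.worldTime > _resumedTime,
	// so a mixer timestamp captured before the latest resume cannot pull the video
	// clock backwards.
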

@@ -481,9 +505,15 @@ void VideoTrack::process(Packet &&packet) {
void VideoTrack::waitForData() {
}

void VideoTrack::start(crl::time startTime) {
void VideoTrack::pause(crl::time time) {
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.start(startTime);
		unwrapped.pause(time);
	});
}

void VideoTrack::resume(crl::time time) {
	_wrapped.with([=](Implementation &unwrapped) {
		unwrapped.resume(time);
	});
}

@@ -36,7 +36,11 @@ public:
	void waitForData();

	// Called from the main thread.
	void start(crl::time startTime);
	// Must be called after 'ready' was invoked.
	void pause(crl::time time);
	void resume(crl::time time);

	// Called from the main thread.
	// Returns the position of the displayed frame.
	[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
	[[nodiscard]] QImage frame(const FrameRequest &request) const;