mirror of https://github.com/procxx/kepka.git
Allow streaming videos with unknown duration.
When you stream an image/GIF as a soundless video, the total duration is unknown, so we accumulate packet->pts + packet->duration as the duration.
commit b65a24df96
parent c655bf852f
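The mechanism, roughly: while the container reports no duration, every demuxed packet updates a running maximum of packet->pts + packet->duration (converted to milliseconds), and once the demuxer reaches end of file that maximum is taken as the stream duration, so looping and the position/receivedTill reporting keep working. Below is a minimal standalone sketch of that accumulation using raw FFmpeg types; the DurationEstimator class, its members, and kNoDurationKnown are illustrative names, not part of the commit, which does the same thing through DurationByPacket()/accumulate_max on the tracks.

// Sketch only (not from the commit): estimate a stream's duration from the
// packets themselves when the container reports none.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/mathematics.h>
} // extern "C"

#include <algorithm>
#include <cstdint>

constexpr int64_t kNoDurationKnown = -1;

class DurationEstimator {
public:
    // Feed every demuxed packet of the stream.
    void feed(const AVPacket &packet, AVRational timeBase) {
        if (packet.pts == AV_NOPTS_VALUE) {
            return;
        }
        const AVRational ms{ 1, 1000 };
        const auto position = av_rescale_q(packet.pts, timeBase, ms);
        const auto length = std::max<int64_t>(
            av_rescale_q(packet.duration, timeBase, ms),
            1); // assume a packet covers at least one millisecond
        // The packet that ends latest is the best duration guess so far.
        _bestGuess = std::max(_bestGuess, position + length);
    }

    // Meaningful only after the demuxer hit EOF, i.e. all packets were seen.
    int64_t durationAtEndOfFile() const {
        return (_bestGuess > 0) ? _bestGuess : kNoDurationKnown;
    }

private:
    int64_t _bestGuess = 0;
};

Usage would be to call feed() on every packet while demuxing and, once av_read_frame() returns AVERROR_EOF, use durationAtEndOfFile() instead of the unavailable container duration. In the commit itself the same guess lives in _durationByLastAudioPacket / _durationByLastVideoPacket (and _durationByLastPacket in the video track) and is only trusted once _readTillEnd is set or the first loop has completed.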
@@ -28,6 +28,7 @@ AudioTrack::AudioTrack(
 , _error(std::move(error))
 , _playPosition(options.position) {
     Expects(_stream.duration > 1);
+    Expects(_stream.duration != kDurationUnavailable); // Not supported.
     Expects(_ready != nullptr);
     Expects(_error != nullptr);
     Expects(_audioId.externalPlayId() != 0);
@@ -47,7 +48,9 @@ crl::time AudioTrack::streamDuration() const {
 }
 
 void AudioTrack::process(Packet &&packet) {
-    _noMoreData = packet.empty();
+    if (packet.empty()) {
+        _readTillEnd = true;
+    }
     if (initialized()) {
         mixerEnqueue(std::move(packet));
     } else if (!tryReadFirstFrame(std::move(packet))) {
@@ -78,7 +81,7 @@ bool AudioTrack::tryReadFirstFrame(Packet &&packet) {
         // Return the last valid frame if we seek too far.
         _stream.frame = std::move(_initialSkippingFrame);
         return processFirstFrame();
-    } else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
+    } else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
         return false;
     } else {
         // Waiting for more packets.
@@ -139,7 +142,7 @@ void AudioTrack::callReady() {
     auto data = AudioInformation();
     data.state.duration = _stream.duration;
     data.state.position = _startedPosition;
-    data.state.receivedTill = _noMoreData
+    data.state.receivedTill = _readTillEnd
         ? _stream.duration
         : _startedPosition;
     base::take(_ready)({ VideoInformation(), data });
@@ -65,7 +65,7 @@ private:
     // Accessed from the same unspecified thread.
     Stream _stream;
     const AudioMsgId _audioId;
-    bool _noMoreData = false;
+    bool _readTillEnd = false;
 
     // Assumed to be thread-safe.
     FnMut<void(const Information &)> _ready;
@@ -10,6 +10,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 namespace Media {
 
 constexpr auto kTimeUnknown = std::numeric_limits<crl::time>::min();
+constexpr auto kDurationMax = crl::time(std::numeric_limits<int>::max());
+constexpr auto kDurationUnavailable = std::numeric_limits<crl::time>::max();
 
 namespace Audio {
 bool SupportsSpeedControl();
@@ -135,13 +135,17 @@ Stream File::Context::initStream(
     result.duration = (info->duration != AV_NOPTS_VALUE)
         ? PtsToTime(info->duration, result.timeBase)
         : PtsToTime(format->duration, kUniversalTimeBase);
-    if (result.duration == kTimeUnknown || !result.duration) {
+    if (!result.duration) {
         result.codec = nullptr;
-        return result;
+    } else if (result.duration == kTimeUnknown) {
+        result.duration = kDurationUnavailable;
+    } else {
+        ++result.duration;
+        if (result.duration > kDurationMax) {
+            result.duration = 0;
+            result.codec = nullptr;
+        }
     }
-    // We want duration to be greater than any valid frame position.
-    // That way we can handle looping by advancing position by n * duration.
-    ++result.duration;
     return result;
 }
 
@@ -153,6 +157,9 @@ void File::Context::seekToPosition(
 
     if (!position) {
         return;
+    } else if (stream.duration == kDurationUnavailable) {
+        // Seek in files with unknown duration is not supported.
+        return;
     }
     //
     // Non backward search reads the whole file if the position is after
@@ -154,7 +154,7 @@ void Player::trackPlayedTill(
     if (guard && position != kTimeUnknown) {
         state.position = position;
         const auto value = _options.loop
-            ? (position % _totalDuration)
+            ? (position % computeTotalDuration())
             : position;
         _updates.fire({ PlaybackUpdate<Track>{ value } });
     }
@@ -179,7 +179,7 @@ void Player::trackSendReceivedTill(
         state.receivedTill,
         _previousReceivedTill);
     const auto value = _options.loop
-        ? (receivedTill % _totalDuration)
+        ? (receivedTill % computeTotalDuration())
         : receivedTill;
     _updates.fire({ PreloadedUpdate<Track>{ value } });
 }
@@ -230,13 +230,17 @@ bool Player::fileReady(Stream &&video, Stream &&audio) {
         };
     };
     const auto mode = _options.mode;
-    if (mode != Mode::Audio && mode != Mode::Both) {
+    if ((mode != Mode::Audio && mode != Mode::Both)
+        || audio.duration == kDurationUnavailable) {
         audio = Stream();
     }
     if (mode != Mode::Video && mode != Mode::Both) {
         video = Stream();
     }
-    if (audio.codec) {
+    if (audio.duration == kDurationUnavailable) {
+        LOG(("Streaming Error: Audio stream with unknown duration."));
+        return false;
+    } else if (audio.codec) {
         if (_options.audioId.audio() != nullptr) {
             _audioId = AudioMsgId(
                 _options.audioId.audio(),
@@ -278,6 +282,11 @@ bool Player::fileReady(Stream &&video, Stream &&audio) {
         LOG(("Streaming Error: Required stream not found for mode %1."
             ).arg(int(mode)));
         return false;
+    } else if (_audio
+        && _video
+        && _video->streamDuration() == kDurationUnavailable) {
+        LOG(("Streaming Error: Both streams with unknown video duration."));
+        return false;
     }
     _totalDuration = std::max(
         _audio ? _audio->streamDuration() : kTimeUnknown,
@@ -315,34 +324,43 @@ bool Player::fileProcessPacket(Packet &&packet) {
     const auto index = native.stream_index;
     if (packet.empty()) {
         _readTillEnd = true;
+        setDurationByPackets();
        if (_audio) {
-            const auto till = _loopingShift + _audio->streamDuration();
+            const auto till = _loopingShift + computeAudioDuration();
             crl::on_main(&_sessionGuard, [=] {
                 audioReceivedTill(till);
             });
             _audio->process(Packet());
         }
         if (_video) {
-            const auto till = _loopingShift + _video->streamDuration();
+            const auto till = _loopingShift + computeVideoDuration();
             crl::on_main(&_sessionGuard, [=] {
                 videoReceivedTill(till);
             });
             _video->process(Packet());
         }
     } else if (_audio && _audio->streamIndex() == native.stream_index) {
+        accumulate_max(
+            _durationByLastAudioPacket,
+            durationByPacket(*_audio, packet));
+
         const auto till = _loopingShift + std::clamp(
             PacketPosition(packet, _audio->streamTimeBase()),
             crl::time(0),
-            _audio->streamDuration() - 1);
+            computeAudioDuration() - 1);
         crl::on_main(&_sessionGuard, [=] {
             audioReceivedTill(till);
         });
         _audio->process(std::move(packet));
     } else if (_video && _video->streamIndex() == native.stream_index) {
+        accumulate_max(
+            _durationByLastVideoPacket,
+            durationByPacket(*_video, packet));
+
         const auto till = _loopingShift + std::clamp(
             PacketPosition(packet, _video->streamTimeBase()),
             crl::time(0),
-            _video->streamDuration() - 1);
+            computeVideoDuration() - 1);
         crl::on_main(&_sessionGuard, [=] {
             videoReceivedTill(till);
         });
@@ -353,8 +371,15 @@ bool Player::fileProcessPacket(Packet &&packet) {
 
 bool Player::fileReadMore() {
     if (_options.loop && _readTillEnd) {
+        const auto duration = computeTotalDuration();
+        if (duration == kDurationUnavailable) {
+            LOG(("Streaming Error: "
+                "Couldn't find out the real stream duration."));
+            fileError(Error::InvalidData);
+            return false;
+        }
+        _loopingShift += duration;
         _readTillEnd = false;
-        _loopingShift += _totalDuration;
         return true;
     }
     return !_readTillEnd && !_pauseReading;
@@ -373,6 +398,40 @@ void Player::streamFailed(Error error) {
     }
 }
 
+template <typename Track>
+int Player::durationByPacket(
+        const Track &track,
+        const Packet &packet) {
+    // We've set this value on the first cycle.
+    if (_loopingShift || _totalDuration != kDurationUnavailable) {
+        return 0;
+    }
+    const auto result = DurationByPacket(packet, track.streamTimeBase());
+    if (result < 0) {
+        fileError(Error::InvalidData);
+        return 0;
+    }
+
+    Ensures(result > 0);
+    return result;
+}
+
+void Player::setDurationByPackets() {
+    if (_loopingShift || _totalDuration != kDurationUnavailable) {
+        return;
+    }
+    const auto duration = std::max(
+        _durationByLastAudioPacket,
+        _durationByLastVideoPacket);
+    if (duration > 1) {
+        _durationByPackets = duration;
+    } else {
+        LOG(("Streaming Error: Bad total duration by packets: %1"
+            ).arg(duration));
+        fileError(Error::InvalidData);
+    }
+}
+
 void Player::provideStartInformation() {
     Expects(_stage == Stage::Initializing);
 
@@ -413,7 +472,7 @@ void Player::play(const PlaybackOptions &options) {
     // Looping video with audio is not supported for now.
     Expects(!options.loop || (options.mode != Mode::Both));
 
-    const auto previous = getCurrentReceivedTill();
+    const auto previous = getCurrentReceivedTill(computeTotalDuration());
 
     stop();
     _lastFailure = std::nullopt;
@@ -442,6 +501,43 @@ crl::time Player::loadInAdvanceFor() const {
     return _remoteLoader ? kLoadInAdvanceForRemote : kLoadInAdvanceForLocal;
 }
 
+crl::time Player::computeTotalDuration() const {
+    if (_totalDuration != kDurationUnavailable) {
+        return _totalDuration;
+    } else if (const auto byPackets = _durationByPackets.load()) {
+        return byPackets;
+    }
+    return kDurationUnavailable;
+}
+
+crl::time Player::computeAudioDuration() const {
+    Expects(_audio != nullptr);
+
+    const auto result = _audio->streamDuration();
+    if (result != kDurationUnavailable) {
+        return result;
+    } else if ((_loopingShift || _readTillEnd)
+        && _durationByLastAudioPacket) {
+        // We looped, so it already holds full stream duration.
+        return _durationByLastAudioPacket;
+    }
+    return kDurationUnavailable;
+}
+
+crl::time Player::computeVideoDuration() const {
+    Expects(_video != nullptr);
+
+    const auto result = _video->streamDuration();
+    if (result != kDurationUnavailable) {
+        return result;
+    } else if ((_loopingShift || _readTillEnd)
+        && _durationByLastVideoPacket) {
+        // We looped, so it already holds full stream duration.
+        return _durationByLastVideoPacket;
+    }
+    return kDurationUnavailable;
+}
+
 void Player::pause() {
     Expects(active());
 
@@ -609,6 +705,9 @@ void Player::stop() {
     _pauseReading = false;
     _readTillEnd = false;
     _loopingShift = 0;
+    _durationByPackets = 0;
+    _durationByLastAudioPacket = 0;
+    _durationByLastVideoPacket = 0;
     _information = Information();
 }
 
@@ -691,13 +790,17 @@ Media::Player::TrackState Player::prepareLegacyState() const {
     result.position = std::max(
         _information.audio.state.position,
         _information.video.state.position);
+    result.length = computeTotalDuration();
     if (result.position == kTimeUnknown) {
         result.position = _options.position;
-    } else if (_options.loop && _totalDuration > 0) {
-        result.position %= _totalDuration;
+    } else if (_options.loop && result.length > 0) {
+        result.position %= result.length;
     }
-    result.receivedTill = _remoteLoader ? getCurrentReceivedTill() : 0;
-    result.length = _totalDuration;
+    result.receivedTill = _remoteLoader
+        ? getCurrentReceivedTill(result.length)
+        : 0;
+    result.frequency = kMsFrequency;
+
     if (result.length == kTimeUnknown) {
         const auto document = _options.audioId.audio();
         const auto duration = document ? document->getDuration() : 0;
@@ -707,17 +810,16 @@ Media::Player::TrackState Player::prepareLegacyState() const {
             result.length = std::max(crl::time(result.position), crl::time(0));
         }
     }
-    result.frequency = kMsFrequency;
     return result;
 }
 
-crl::time Player::getCurrentReceivedTill() const {
+crl::time Player::getCurrentReceivedTill(crl::time duration) const {
     const auto previous = std::max(_previousReceivedTill, crl::time(0));
     const auto result = std::min(
         std::max(_information.audio.state.receivedTill, previous),
         std::max(_information.video.state.receivedTill, previous));
-    return (result >= 0 && _totalDuration > 1 && _options.loop)
-        ? (result % _totalDuration)
+    return (result >= 0 && duration > 1 && _options.loop)
+        ? (result % duration)
         : result;
 }
 
@@ -107,12 +107,21 @@ private:
     [[nodiscard]] bool bothReceivedEnough(crl::time amount) const;
     [[nodiscard]] bool receivedTillEnd() const;
     void checkResumeFromWaitingForData();
-    [[nodiscard]] crl::time getCurrentReceivedTill() const;
+    [[nodiscard]] crl::time getCurrentReceivedTill(crl::time duration) const;
     void savePreviousReceivedTill(
         const PlaybackOptions &options,
         crl::time previousReceivedTill);
     [[nodiscard]] crl::time loadInAdvanceFor() const;
 
+    template <typename Track>
+    int durationByPacket(const Track &track, const Packet &packet);
+
+    // Valid after fileReady call ends. Thread-safe.
+    [[nodiscard]] crl::time computeAudioDuration() const;
+    [[nodiscard]] crl::time computeVideoDuration() const;
+    [[nodiscard]] crl::time computeTotalDuration() const;
+    void setDurationByPackets();
+
     template <typename Track>
     void trackReceivedTill(
         const Track &track,
@@ -170,6 +179,9 @@ private:
     crl::time _totalDuration = kTimeUnknown;
     crl::time _loopingShift = 0;
     crl::time _previousReceivedTill = kTimeUnknown;
+    std::atomic<int> _durationByPackets = 0;
+    int _durationByLastAudioPacket = 0;
+    int _durationByLastVideoPacket = 0;
 
     rpl::lifetime _lifetime;
     rpl::lifetime _sessionLifetime;
@@ -289,6 +289,27 @@ crl::time PacketPosition(const Packet &packet, AVRational timeBase) {
         timeBase);
 }
 
+crl::time PacketDuration(const Packet &packet, AVRational timeBase) {
+    return PtsToTime(packet.fields().duration, timeBase);
+}
+
+int DurationByPacket(const Packet &packet, AVRational timeBase) {
+    const auto position = PacketPosition(packet, timeBase);
+    const auto duration = std::max(
+        PacketDuration(packet, timeBase),
+        crl::time(1));
+    const auto bad = [](crl::time time) {
+        return (time < 0) || (time > kDurationMax);
+    };
+    if (bad(position) || bad(duration) || bad(position + duration + 1)) {
+        LOG(("Streaming Error: Wrong duration by packet: %1 + %2"
+            ).arg(position
+            ).arg(duration));
+        return -1;
+    }
+    return int(position + duration + 1);
+}
+
 crl::time FramePosition(const Stream &stream) {
     const auto pts = !stream.frame
         ? AV_NOPTS_VALUE
@@ -432,7 +453,7 @@ QImage ConvertFrame(
         for (const auto y : ranges::view::ints(0, frame->height)) {
             for (const auto x : ranges::view::ints(0, frame->width)) {
                 // Wipe out possible alpha values.
-                *to++ = 0x000000FFU | *from++;
+                *to++ = 0xFF000000U | *from++;
             }
             to += deltaTo;
             from += deltaFrom;
@@ -191,6 +191,12 @@ void LogError(QLatin1String method, AvErrorWrap error);
 [[nodiscard]] crl::time PacketPosition(
     const Packet &packet,
     AVRational timeBase);
+[[nodiscard]] crl::time PacketDuration(
+    const Packet &packet,
+    AVRational timeBase);
+[[nodiscard]] int DurationByPacket(
+    const Packet &packet,
+    AVRational timeBase);
 [[nodiscard]] crl::time FramePosition(const Stream &stream);
 [[nodiscard]] int ReadRotationFromMetadata(not_null<AVStream*> stream);
 [[nodiscard]] AVRational ValidateAspectRatio(AVRational aspect);
@@ -58,6 +58,7 @@ private:
         FrameResult,
         Shared::PrepareNextCheck>;
 
+    void fail(Error error);
     [[nodiscard]] bool interrupted() const;
     [[nodiscard]] bool tryReadFirstFrame(Packet &&packet);
     [[nodiscard]] bool fillStateFromFrame();
@@ -68,7 +69,9 @@ private:
     [[nodiscard]] FrameResult readFrame(not_null<Frame*> frame);
     void presentFrameIfNeeded();
     void callReady();
-    void loopAround();
+    [[nodiscard]] bool loopAround();
+    [[nodiscard]] crl::time computeDuration() const;
+    [[nodiscard]] int durationByPacket(const Packet &packet);
 
     // Force frame position to be clamped to [0, duration] and monotonic.
     [[nodiscard]] crl::time currentFramePosition() const;
@@ -84,13 +87,14 @@ private:
 
     Stream _stream;
     AudioMsgId _audioId;
-    bool _noMoreData = false;
+    bool _readTillEnd = false;
     FnMut<void(const Information &)> _ready;
     Fn<void(Error)> _error;
     crl::time _pausedTime = kTimeUnknown;
     crl::time _resumedTime = kTimeUnknown;
+    int _durationByLastPacket = 0;
     mutable TimePoint _syncTimePoint;
-    crl::time _framePositionShift = 0;
+    crl::time _loopingShift = 0;
     rpl::event_stream<> _checkNextFrame;
     rpl::event_stream<> _waitingForData;
     FrameRequest _request;
@@ -142,15 +146,39 @@ void VideoTrackObject::process(Packet &&packet) {
     if (interrupted()) {
         return;
     }
-    _noMoreData = packet.empty();
+    if (packet.empty()) {
+        _readTillEnd = true;
+    } else if (!_readTillEnd) {
+        accumulate_max(
+            _durationByLastPacket,
+            durationByPacket(packet));
+        if (interrupted()) {
+            return;
+        }
+    }
     if (_shared->initialized()) {
         _stream.queue.push_back(std::move(packet));
         queueReadFrames();
     } else if (!tryReadFirstFrame(std::move(packet))) {
-        _error(Error::InvalidData);
+        fail(Error::InvalidData);
     }
 }
 
+int VideoTrackObject::durationByPacket(const Packet &packet) {
+    // We've set this value on the first cycle.
+    if (_loopingShift || _stream.duration != kDurationUnavailable) {
+        return 0;
+    }
+    const auto result = DurationByPacket(packet, _stream.timeBase);
+    if (result < 0) {
+        fail(Error::InvalidData);
+        return 0;
+    }
+
+    Ensures(result > 0);
+    return result;
+}
+
 void VideoTrackObject::queueReadFrames(crl::time delay) {
     if (delay > 0) {
         _readFramesTimer.callOnce(delay);
@@ -175,7 +203,9 @@ void VideoTrackObject::readFrames() {
             || result == FrameResult::Finished) {
             presentFrameIfNeeded();
         } else if (result == FrameResult::Looped) {
-            time -= _stream.duration;
+            const auto duration = computeDuration();
+            Assert(duration != kDurationUnavailable);
+            time -= duration;
         }
     }, [&](Shared::PrepareNextCheck delay) {
         Expects(delay == kTimeUnknown || delay > 0);
@@ -211,25 +241,44 @@ auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
     });
 }
 
-void VideoTrackObject::loopAround() {
+bool VideoTrackObject::loopAround() {
+    const auto duration = computeDuration();
+    if (duration == kDurationUnavailable) {
+        LOG(("Streaming Error: "
+            "Couldn't find out the real video stream duration."));
+        return false;
+    }
     avcodec_flush_buffers(_stream.codec.get());
-    _framePositionShift += _stream.duration;
+    _loopingShift += duration;
+    _readTillEnd = false;
+    return true;
+}
+
+crl::time VideoTrackObject::computeDuration() const {
+    if (_stream.duration != kDurationUnavailable) {
+        return _stream.duration;
+    } else if ((_loopingShift || _readTillEnd) && _durationByLastPacket) {
+        // We looped, so it already holds full stream duration.
+        return _durationByLastPacket;
+    }
+    return kDurationUnavailable;
 }
 
 auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
     if (const auto error = ReadNextFrame(_stream)) {
         if (error.code() == AVERROR_EOF) {
-            if (_options.loop) {
-                loopAround();
-                return FrameResult::Looped;
-            } else {
+            if (!_options.loop) {
                 frame->position = kFinishedPosition;
                 frame->displayed = kTimeUnknown;
                 return FrameResult::Finished;
+            } else if (loopAround()) {
+                return FrameResult::Looped;
+            } else {
+                fail(Error::InvalidData);
+                return FrameResult::Error;
             }
-        } else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
-            interrupt();
-            _error(Error::InvalidData);
+        } else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
+            fail(Error::InvalidData);
             return FrameResult::Error;
         }
         Assert(_stream.queue.empty());
@@ -238,8 +287,7 @@ auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
     }
     const auto position = currentFramePosition();
     if (position == kTimeUnknown) {
-        interrupt();
-        _error(Error::InvalidData);
+        fail(Error::InvalidData);
         return FrameResult::Error;
     }
     std::swap(frame->decoded, _stream.frame);
@@ -264,8 +312,7 @@ void VideoTrackObject::presentFrameIfNeeded() {
         std::move(frame->original));
     if (frame->original.isNull()) {
         frame->prepared = QImage();
-        interrupt();
-        _error(Error::InvalidData);
+        fail(Error::InvalidData);
         return;
     }
 
@@ -361,7 +408,7 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
         // Return the last valid frame if we seek too far.
         _stream.frame = std::move(_initialSkippingFrame);
         return processFirstFrame();
-    } else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
+    } else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
        return false;
     } else {
         // Waiting for more packets.
@@ -402,10 +449,10 @@ crl::time VideoTrackObject::currentFramePosition() const {
     if (position == kTimeUnknown || position == kFinishedPosition) {
         return kTimeUnknown;
     }
-    return _framePositionShift + std::clamp(
+    return _loopingShift + std::clamp(
         position,
         crl::time(0),
-        _stream.duration - 1);
+        computeDuration() - 1);
 }
 
 bool VideoTrackObject::fillStateFromFrame() {
@@ -431,7 +478,7 @@ void VideoTrackObject::callReady() {
     data.rotation = _stream.rotation;
     data.state.duration = _stream.duration;
     data.state.position = _syncTimePoint.trackTime;
-    data.state.receivedTill = _noMoreData
+    data.state.receivedTill = _readTillEnd
         ? _stream.duration
         : _syncTimePoint.trackTime;
     base::take(_ready)({ data });
@@ -465,6 +512,11 @@ void VideoTrackObject::interrupt() {
     _shared = nullptr;
 }
 
+void VideoTrackObject::fail(Error error) {
+    interrupt();
+    _error(error);
+}
+
 void VideoTrack::Shared::init(QImage &&cover, crl::time position) {
     Expects(!initialized());
 
|
Loading…
Reference in New Issue