Use a packet queue instead of a single packet in the FFmpeg clip reader.

John Preston 2016-07-05 20:43:30 +03:00
parent b61cc150db
commit 98fe307cbf
8 changed files with 163 additions and 90 deletions
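For orientation, a condensed sketch of readNextFrame() as it behaves after this change (not the literal patch below; identifiers are taken from the diffs, and the logging and end-of-file rewind are compressed into comments):

bool FFMpegReaderImplementation::readNextFrame() {
	while (true) {
		// Refill the queue until a video packet arrives or the stream ends.
		while (_packetQueue.isEmpty()) {
			auto packetResult = readPacket();
			if (packetResult == PacketResult::Error) return false;
			if (packetResult == PacketResult::EndOfFile) break;
		}
		bool eofReached = _packetQueue.isEmpty();

		// Decode the head packet, or the empty _packetNull at end of file.
		startPacket();
		auto packet = _packetQueue.isEmpty() ? &_packetNull : &_packetQueue.head();
		int got_frame = 0;
		int decoded = packet->size;
		int res = avcodec_decode_video2(_codecContext, _frame, &got_frame, packet);
		if (res < 0) {
			if (res == AVERROR_INVALIDDATA) { finishPacket(); continue; } // skip a broken packet
			return false; // the real code also tolerates AVERROR_EOF here once a frame was seen
		}
		if (res > 0) decoded = res;

		// Advance inside the head packet; drop it from the queue once fully consumed.
		if (!_packetQueue.isEmpty()) {
			packet->data += decoded;
			packet->size -= decoded;
			if (packet->size <= 0) finishPacket();
		}

		if (got_frame) return true; // a frame is ready for renderFrame()
		if (eofReached) {
			clearPacketQueue();
			// seek back to the start and keep looping
		}
	}
}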

View File

@@ -27,9 +27,9 @@ namespace internal {
 
 FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, QByteArray *data) : ReaderImplementation(location, data) {
 	_frame = av_frame_alloc();
-	av_init_packet(&_avpkt);
-	_avpkt.data = NULL;
-	_avpkt.size = 0;
+	av_init_packet(&_packetNull);
+	_packetNull.data = nullptr;
+	_packetNull.size = 0;
 }
 
 bool FFMpegReaderImplementation::readNextFrame() {
@@ -38,57 +38,50 @@ bool FFMpegReaderImplementation::readNextFrame() {
 		_frameRead = false;
 	}
 
-	int res;
 	while (true) {
-		if (_avpkt.size > 0) { // previous packet not finished
-			res = 0;
-		} else if ((res = av_read_frame(_fmtContext, &_avpkt)) < 0) {
-			if (res != AVERROR_EOF || !_hadFrame) {
-				char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
-				LOG(("Gif Error: Unable to av_read_frame() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
-				return false;
-			}
-		}
-
-		bool finished = (res < 0);
-		if (finished) {
-			_avpkt.data = NULL;
-			_avpkt.size = 0;
-		} else {
-			rememberPacket();
-		}
-
-		int32 got_frame = 0;
-		int32 decoded = _avpkt.size;
-		if (_avpkt.stream_index == _streamId) {
-			if ((res = avcodec_decode_video2(_codecContext, _frame, &got_frame, &_avpkt)) < 0) {
-				char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
-				LOG(("Gif Error: Unable to avcodec_decode_video2() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
-
-				if (res == AVERROR_INVALIDDATA) { // try to skip bad packet
-					freePacket();
-					_avpkt.data = NULL;
-					_avpkt.size = 0;
-					continue;
-				}
-
-				if (res != AVERROR_EOF || !_hadFrame) { // try to skip end of file
-					return false;
-				}
-				freePacket();
-				_avpkt.data = NULL;
-				_avpkt.size = 0;
-				continue;
-			}
-			if (res > 0) decoded = res;
-		} else if (_audioStreamId >= 0 && _avpkt.stream_index == _audioStreamId) {
-			freePacket();
-			continue;
-		}
-		if (!finished) {
-			_avpkt.data += decoded;
-			_avpkt.size -= decoded;
-			if (_avpkt.size <= 0) freePacket();
-		}
+		while (_packetQueue.isEmpty()) {
+			auto packetResult = readPacket();
+			if (packetResult == PacketResult::Error) {
+				return false;
+			} else if (packetResult == PacketResult::EndOfFile) {
+				break;
+			}
+		}
+		bool eofReached = _packetQueue.isEmpty();
+
+		startPacket();
+
+		int got_frame = 0;
+		int decoded = 0;
+		auto packet = &_packetNull;
+		if (!_packetQueue.isEmpty()) {
+			packet = &_packetQueue.head();
+			decoded = packet->size;
+		}
+		int res = 0;
+		if ((res = avcodec_decode_video2(_codecContext, _frame, &got_frame, packet)) < 0) {
+			char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
+			LOG(("Gif Error: Unable to avcodec_decode_video2() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
+
+			if (res == AVERROR_INVALIDDATA) { // try to skip bad packet
+				finishPacket();
+				continue;
+			}
+
+			eofReached = (res == AVERROR_EOF);
+			if (!eofReached || !_hadFrame) { // try to skip end of file
+				return false;
+			}
+		}
+		if (res > 0) decoded = res;
+		if (!_packetQueue.isEmpty()) {
+			packet->data += decoded;
+			packet->size -= decoded;
+			if (packet->size <= 0) {
+				finishPacket();
+			}
+		}
 
 		if (got_frame) {
@@ -110,7 +103,8 @@ bool FFMpegReaderImplementation::readNextFrame() {
 			return true;
 		}
 
-		if (finished) {
+		if (eofReached) {
+			clearPacketQueue();
 			if ((res = avformat_seek_file(_fmtContext, _streamId, std::numeric_limits<int64_t>::min(), 0, std::numeric_limits<int64_t>::max(), 0)) < 0) {
 				if ((res = av_seek_frame(_fmtContext, _streamId, 0, AVSEEK_FLAG_BYTE)) < 0) {
 					if ((res = av_seek_frame(_fmtContext, _streamId, 0, AVSEEK_FLAG_FRAME)) < 0) {
@@ -176,7 +170,7 @@ int FFMpegReaderImplementation::nextFrameDelay() {
 	return _currentFrameDelay;
 }
 
-bool FFMpegReaderImplementation::start(bool onlyGifv) {
+bool FFMpegReaderImplementation::start(Mode mode) {
 	initDevice();
 	if (!_device->open(QIODevice::ReadOnly)) {
 		LOG(("Gif Error: Unable to open device %1").arg(logData()));
@@ -211,13 +205,14 @@ bool FFMpegReaderImplementation::start(bool onlyGifv) {
 		LOG(("Gif Error: Unable to av_find_best_stream %1, error %2, %3").arg(logData()).arg(_streamId).arg(av_make_error_string(err, sizeof(err), _streamId)));
 		return false;
 	}
+	_packetNull.stream_index = _streamId;
 
 	// Get a pointer to the codec context for the audio stream
 	_codecContext = _fmtContext->streams[_streamId]->codec;
 	_codec = avcodec_find_decoder(_codecContext->codec_id);
 
 	_audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0);
-	if (onlyGifv) {
+	if (mode == Mode::OnlyGifv) {
 		if (_audioStreamId >= 0) { // should be no audio stream
 			return false;
 		}
@@ -227,6 +222,8 @@ bool FFMpegReaderImplementation::start(bool onlyGifv) {
 		if (_codecContext->codec_id != AV_CODEC_ID_H264) {
 			return false;
 		}
+	} else if (mode == Mode::Silent) {
+		_audioStreamId = -1;
 	}
 	av_opt_set_int(_codecContext, "refcounted_frames", 1, 0);
 	if ((res = avcodec_open2(_codecContext, _codec, 0)) < 0) {
@@ -261,23 +258,75 @@ FFMpegReaderImplementation::~FFMpegReaderImplementation() {
 	}
 	if (_fmtContext) avformat_free_context(_fmtContext);
 	av_frame_free(&_frame);
-	freePacket();
+	clearPacketQueue();
 }
 
-void FFMpegReaderImplementation::rememberPacket() {
-	if (!_packetWas) {
-		_packetSize = _avpkt.size;
-		_packetData = _avpkt.data;
-		_packetWas = true;
-	}
-}
-
-void FFMpegReaderImplementation::freePacket() {
-	if (_packetWas) {
-		_avpkt.size = _packetSize;
-		_avpkt.data = _packetData;
-		_packetWas = false;
-	}
-	av_packet_unref(&_avpkt);
-}
+FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket() {
+	AVPacket packet;
+	av_init_packet(&packet);
+	packet.data = nullptr;
+	packet.size = 0;
+
+	int res = 0;
+	if ((res = av_read_frame(_fmtContext, &packet)) < 0) {
+		if (res == AVERROR_EOF) {
+			return PacketResult::EndOfFile;
+		}
+		char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
+		LOG(("Gif Error: Unable to av_read_frame() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
+		return PacketResult::Error;
+	}
+
+	bool videoPacket = (packet.stream_index == _streamId);
+	bool audioPacket = (_audioStreamId >= 0 && packet.stream_index == _audioStreamId);
+	if (audioPacket || videoPacket) {
+		//AVPacket packetForQueue;
+		//av_init_packet(&packetForQueue);
+		//if ((res = av_packet_ref(&packetForQueue, &packet)) < 0) {
+		//	char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
+		//	LOG(("Gif Error: Unable to av_packet_ref() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
+		//	return PacketResult::Error;
+		//}
+		if (videoPacket) {
+			_packetQueue.enqueue(packet);
+			//_packetQueue.enqueue(packetForQueue);
+		} else if (audioPacket) {
+			// queue packet to audio player
+			// audioPlayer()->enqueuePacket(packet, &isEnough)
+			//av_packet_unref(&packetForQueue);
+			av_packet_unref(&packet);
+		}
+	} else {
+		av_packet_unref(&packet);
+	}
+	//av_packet_unref(&packet);
+	return PacketResult::Ok;
+}
+
+void FFMpegReaderImplementation::startPacket() {
+	if (!_packetStarted && !_packetQueue.isEmpty()) {
+		_packetStartedSize = _packetQueue.head().size;
+		_packetStartedData = _packetQueue.head().data;
+		_packetStarted = true;
+	}
+}
+
+void FFMpegReaderImplementation::finishPacket() {
+	if (_packetStarted) {
+		_packetQueue.head().size = _packetStartedSize;
+		_packetQueue.head().data = _packetStartedData;
+		_packetStarted = false;
+		av_packet_unref(&_packetQueue.dequeue());
+	}
+}
+
+void FFMpegReaderImplementation::clearPacketQueue() {
+	finishPacket();
+	auto packets = createAndSwap(_packetQueue);
+	for (auto &packet : packets) {
+		av_packet_unref(&packet);
+	}
+}
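Two details of the new helpers are worth spelling out (a reading of the diff above, not text from the commit). First, _packetNull, the empty packet commented "for final decoding", is what readNextFrame() feeds to avcodec_decode_video2() once the queue is empty at end of file, which drains any frames the decoder is still buffering. Second, decoding advances the head packet's data and size in place, while av_packet_unref() must see the buffer exactly as av_read_frame() filled it; startPacket() therefore records the original pointer and size, and finishPacket() restores them before unreffing and dequeuing.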

View File

@@ -41,7 +41,7 @@ public:
 	bool readNextFrame() override;
 	bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
 	int nextFrameDelay() override;
-	bool start(bool onlyGifv) override;
+	bool start(Mode mode) override;
 
 	int duration() const;
 	QString logData() const;
@@ -49,8 +49,15 @@ public:
 	~FFMpegReaderImplementation();
 
 private:
-	void rememberPacket();
-	void freePacket();
+	enum class PacketResult {
+		Ok,
+		EndOfFile,
+		Error,
+	};
+	PacketResult readPacket();
+	void startPacket();
+	void finishPacket();
+	void clearPacketQueue();
 
 	static int _read(void *opaque, uint8_t *buf, int buf_size);
 	static int64_t _seek(void *opaque, int64_t offset, int whence);
@@ -68,10 +75,11 @@ private:
 	int _audioStreamId = 0;
 
-	AVPacket _avpkt;
-	int _packetSize = 0;
-	uint8_t *_packetData = nullptr;
-	bool _packetWas = false;
+	QQueue<AVPacket> _packetQueue;
+	AVPacket _packetNull; // for final decoding
+	int _packetStartedSize = 0;
+	uint8_t *_packetStartedData = nullptr;
+	bool _packetStarted = false;
 
 	int _width = 0;
 	int _height = 0;

View File

@@ -33,10 +33,15 @@ public:
 		: _location(location)
 		, _data(data) {
 	}
 
+	enum class Mode {
+		OnlyGifv,
+		Silent,
+		Normal,
+	};
+
 	virtual bool readNextFrame() = 0;
 	virtual bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) = 0;
 	virtual int nextFrameDelay() = 0;
-	virtual bool start(bool onlyGifv) = 0;
+	virtual bool start(Mode mode) = 0;
 	virtual ~ReaderImplementation() {
 	}
 
 	int64 dataSize() const {

View File

@@ -70,8 +70,8 @@ int QtGifReaderImplementation::nextFrameDelay() {
 	return _frameDelay;
 }
 
-bool QtGifReaderImplementation::start(bool onlyGifv) {
-	if (onlyGifv) return false;
+bool QtGifReaderImplementation::start(Mode mode) {
+	if (mode == Mode::OnlyGifv) return false;
 	return jumpToStart();
 }

View File

@@ -34,7 +34,7 @@ public:
 	bool readNextFrame() override;
 	bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
 	int nextFrameDelay() override;
-	bool start(bool onlyGifv) override;
+	bool start(Mode mode) override;
 
 	~QtGifReaderImplementation();

View File

@@ -287,10 +287,10 @@ Reader::~Reader() {
 
 class ReaderPrivate {
 public:
 	ReaderPrivate(Reader *reader, const FileLocation &location, const QByteArray &data) : _interface(reader)
-	, _data(data)
-	, _location(_data.isEmpty() ? new FileLocation(location) : 0) {
+	, _mode(reader->mode())
+	, _data(data)
+	, _location(_data.isEmpty() ? new FileLocation(location) : 0) {
 		if (_data.isEmpty() && !_location->accessEnable()) {
 			error();
 			return;
@@ -381,9 +381,17 @@ public:
 			}
 		}
 
-		_implementation = new internal::FFMpegReaderImplementation(_location, &_data);
+		_implementation = std_::make_unique<internal::FFMpegReaderImplementation>(_location, &_data);
 		// _implementation = new QtGifReaderImplementation(_location, &_data);
-		return _implementation->start(false);
+
+		auto implementationMode = [this]() {
+			using ImplementationMode = internal::ReaderImplementation::Mode;
+			if (_mode == Reader::Mode::Gif) {
+				return ImplementationMode::Silent;
+			}
+			return ImplementationMode::Normal;
+		};
+		return _implementation->start(implementationMode());
 	}
 
 	ProcessResult error() {
@@ -393,8 +401,7 @@ public:
 	}
 
 	void stop() {
-		delete _implementation;
-		_implementation = 0;
+		_implementation = nullptr;
 		if (_location) {
 			if (_accessed) {
@@ -409,21 +416,20 @@ public:
 
 	~ReaderPrivate() {
 		stop();
 		deleteAndMark(_location);
-		deleteAndMark(_implementation);
 		_data.clear();
 	}
 
 private:
 	Reader *_interface;
 	State _state = State::Reading;
+	Reader::Mode _mode;
 	QByteArray _data;
 	FileLocation *_location;
 	bool _accessed = false;
 	QBuffer _buffer;
 
-	internal::ReaderImplementation *_implementation = nullptr;
+	std_::unique_ptr<internal::ReaderImplementation> _implementation;
 
 	FrameRequest _request;
 	struct Frame {
@@ -474,7 +480,7 @@ void Manager::start(Reader *reader) {
 
 void Manager::update(Reader *reader) {
 	QReadLocker lock(&_readerPointersMutex);
-	ReaderPointers::const_iterator i = _readerPointers.constFind(reader);
+	auto i = _readerPointers.constFind(reader);
 	if (i == _readerPointers.cend()) {
 		lock.unlock();
@@ -615,9 +621,9 @@ void Manager::process() {
 	uint64 ms = getms(), minms = ms + 86400 * 1000ULL;
 	{
 		QReadLocker lock(&_readerPointersMutex);
-		for (ReaderPointers::iterator it = _readerPointers.begin(), e = _readerPointers.end(); it != e; ++it) {
+		for (auto it = _readerPointers.begin(), e = _readerPointers.end(); it != e; ++it) {
 			if (it->v.loadAcquire()) {
-				Readers::iterator i = _readers.find(it.key()->_private);
+				auto i = _readers.find(it.key()->_private);
 				if (i == _readers.cend()) {
 					_readers.insert(it.key()->_private, 0);
 				} else {
@@ -633,7 +639,7 @@ void Manager::process() {
 		}
 	}
 
-	for (Readers::iterator i = _readers.begin(), e = _readers.end(); i != e;) {
+	for (auto i = _readers.begin(), e = _readers.end(); i != e;) {
 		ReaderPrivate *reader = i.key();
 		if (i.value() <= ms) {
 			ResultHandleState state = handleResult(reader, reader->process(ms), ms);
@@ -693,7 +699,7 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data
 	QByteArray localdata(data);
 
 	auto reader = std_::make_unique<internal::FFMpegReaderImplementation>(&localloc, &localdata);
-	if (reader->start(true)) {
+	if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv)) {
 		bool hasAlpha = false;
 		if (reader->readNextFrame() && reader->renderFrame(cover, hasAlpha, QSize())) {
 			if (cover.width() > 0 && cover.height() > 0 && cover.width() < cover.height() * 10 && cover.height() < cover.width() * 10) {

View File

@@ -101,6 +101,10 @@ public:
 	void stop();
 	void error();
 
+	Mode mode() const {
+		return _mode;
+	}
+
 	~Reader();
 
 private:

View File

@@ -1087,7 +1087,8 @@ void MediaView::displayDocument(DocumentData *doc, HistoryItem *item) { // empty
 			if (_doc->dimensions.width() && _doc->dimensions.height()) {
 				_current = _doc->thumb->pixNoCache(_doc->dimensions.width(), _doc->dimensions.height(), ImagePixSmooth | ImagePixBlurred, _doc->dimensions.width(), _doc->dimensions.height());
 			}
-			_gif = new Media::Clip::Reader(location, _doc->data(), func(this, &MediaView::clipCallback));
+			auto mode = _doc->isVideo() ? Media::Clip::Reader::Mode::Video : Media::Clip::Reader::Mode::Gif;
+			_gif = new Media::Clip::Reader(location, _doc->data(), func(this, &MediaView::clipCallback), mode);
 		}
 	} else {
 		if (QImageReader(location.name()).canRead()) {