mirror of https://github.com/procxx/kepka.git
Improve handling of updates queued by pts.
Queued updates are now ordered by pts and applied in the correct order. Some pts-dependent update handling was also moved to ApiWrap.
This commit is contained in:
parent
101ec9a1c1
commit
949104d879
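A minimal, self-contained sketch of the ordering idea behind this commit (the SkippedQueue class, the std::map and the std::string payload are illustrative assumptions, not the real PtsWaiter code): each skipped update is stored under a key whose high bits are its pts, so iterating the key-ordered map replays the updates in pts order. The corresponding real change is visible below in PtsWaiter::ptsKey(PtsSkippedQueue queue, int32 pts) and PtsWaiter::updateAndApply().

// Minimal sketch of pts-ordered queueing (illustrative names, not code from this repository).
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

class SkippedQueue {
public:
	// Remember an update that could not be applied yet, keyed by its pts.
	void push(int32_t pts, std::string update) {
		// High 32 bits: pts (gives the ordering). Low 32 bits: counter (keeps keys unique).
		auto key = (uint64_t(uint32_t(pts)) << 32) | uint64_t(++_counter);
		_queue.emplace(key, std::move(update));
	}

	// std::map iterates keys in ascending order, so updates come out sorted by pts.
	void applyAll() {
		for (const auto &[key, update] : _queue) {
			std::cout << "applying pts " << (key >> 32) << ": " << update << '\n';
		}
		_queue.clear();
	}

private:
	std::map<uint64_t, std::string> _queue;
	uint32_t _counter = 0;
};

int main() {
	SkippedQueue queue;
	queue.push(105, "delete messages");
	queue.push(103, "new message");
	queue.push(104, "read inbox");
	queue.applyAll(); // prints pts 103, then 104, then 105
}

The low half of the key is only an insertion counter that keeps keys unique when two updates carry the same pts, mirroring the uint64(uint32(pts)) << 32 | (++_skippedKey) expression in the diff.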
@@ -1530,4 +1530,121 @@ void ApiWrap::stickersSaveOrder() {
 	}
 }
 
+void ApiWrap::applyUpdatesNoPtsCheck(const MTPUpdates &updates) {
+	switch (updates.type()) {
+	case mtpc_updateShortMessage: {
+		auto &d = updates.c_updateShortMessage();
+		auto flags = mtpCastFlags(d.vflags.v) | MTPDmessage::Flag::f_from_id;
+		App::histories().addNewMessage(MTP_message(MTP_flags(flags), d.vid, d.is_out() ? MTP_int(AuthSession::CurrentUserId()) : d.vuser_id, MTP_peerUser(d.is_out() ? d.vuser_id : MTP_int(AuthSession::CurrentUserId())), d.vfwd_from, d.vvia_bot_id, d.vreply_to_msg_id, d.vdate, d.vmessage, MTP_messageMediaEmpty(), MTPnullMarkup, d.has_entities() ? d.ventities : MTPnullEntities, MTPint(), MTPint()), NewMessageUnread);
+	} break;
+
+	case mtpc_updateShortChatMessage: {
+		auto &d = updates.c_updateShortChatMessage();
+		auto flags = mtpCastFlags(d.vflags.v) | MTPDmessage::Flag::f_from_id;
+		App::histories().addNewMessage(MTP_message(MTP_flags(flags), d.vid, d.vfrom_id, MTP_peerChat(d.vchat_id), d.vfwd_from, d.vvia_bot_id, d.vreply_to_msg_id, d.vdate, d.vmessage, MTP_messageMediaEmpty(), MTPnullMarkup, d.has_entities() ? d.ventities : MTPnullEntities, MTPint(), MTPint()), NewMessageUnread);
+	} break;
+
+	case mtpc_updateShortSentMessage: {
+		auto &d = updates.c_updateShortSentMessage();
+		Q_UNUSED(d); // Sent message data was applied anyway.
+	} break;
+
+	default: Unexpected("Type in applyUpdatesNoPtsCheck()");
+	}
+}
+
+void ApiWrap::applyUpdateNoPtsCheck(const MTPUpdate &update) {
+	switch (update.type()) {
+	case mtpc_updateNewMessage: {
+		auto &d = update.c_updateNewMessage();
+		auto needToAdd = true;
+		if (d.vmessage.type() == mtpc_message) { // index forwarded messages to links _overview
+			if (App::checkEntitiesAndViewsUpdate(d.vmessage.c_message())) { // already in blocks
+				LOG(("Skipping message, because it is already in blocks!"));
+				needToAdd = false;
+			}
+		}
+		if (needToAdd) {
+			App::histories().addNewMessage(d.vmessage, NewMessageUnread);
+		}
+	} break;
+
+	case mtpc_updateReadMessagesContents: {
+		auto &d = update.c_updateReadMessagesContents();
+		auto &v = d.vmessages.v;
+		for (auto i = 0, l = v.size(); i < l; ++i) {
+			if (auto item = App::histItemById(NoChannel, v.at(i).v)) {
+				if (item->isMediaUnread()) {
+					item->markMediaRead();
+					Ui::repaintHistoryItem(item);
+
+					if (item->out() && item->history()->peer->isUser()) {
+						auto when = App::main()->requestingDifference() ? 0 : unixtime();
+						item->history()->peer->asUser()->madeAction(when);
+					}
+				}
+			}
+		}
+	} break;
+
+	case mtpc_updateReadHistoryInbox: {
+		auto &d = update.c_updateReadHistoryInbox();
+		App::feedInboxRead(peerFromMTP(d.vpeer), d.vmax_id.v);
+	} break;
+
+	case mtpc_updateReadHistoryOutbox: {
+		auto &d = update.c_updateReadHistoryOutbox();
+		auto peerId = peerFromMTP(d.vpeer);
+		auto when = App::main()->requestingDifference() ? 0 : unixtime();
+		App::feedOutboxRead(peerId, d.vmax_id.v, when);
+	} break;
+
+	case mtpc_updateWebPage: {
+		auto &d = update.c_updateWebPage();
+		Q_UNUSED(d); // Web page was updated anyway.
+	} break;
+
+	case mtpc_updateDeleteMessages: {
+		auto &d = update.c_updateDeleteMessages();
+		App::feedWereDeleted(NoChannel, d.vmessages.v);
+	} break;
+
+	case mtpc_updateNewChannelMessage: {
+		auto &d = update.c_updateNewChannelMessage();
+		auto needToAdd = true;
+		if (d.vmessage.type() == mtpc_message) { // index forwarded messages to links _overview
+			if (App::checkEntitiesAndViewsUpdate(d.vmessage.c_message())) { // already in blocks
+				LOG(("Skipping message, because it is already in blocks!"));
+				needToAdd = false;
+			}
+		}
+		if (needToAdd) {
+			App::histories().addNewMessage(d.vmessage, NewMessageUnread);
+		}
+	} break;
+
+	case mtpc_updateEditChannelMessage: {
+		auto &d = update.c_updateEditChannelMessage();
+		App::updateEditedMessage(d.vmessage);
+	} break;
+
+	case mtpc_updateEditMessage: {
+		auto &d = update.c_updateEditMessage();
+		App::updateEditedMessage(d.vmessage);
+	} break;
+
+	case mtpc_updateChannelWebPage: {
+		auto &d = update.c_updateChannelWebPage();
+		Q_UNUSED(d); // Web page was updated anyway.
+	} break;
+
+	case mtpc_updateDeleteChannelMessages: {
+		auto &d = update.c_updateDeleteChannelMessages();
+		App::feedWereDeleted(d.vchannel_id.v, d.vmessages.v);
+	} break;
+
+	default: Unexpected("Type in applyUpdateNoPtsCheck()");
+	}
+}
+
 ApiWrap::~ApiWrap() = default;

@@ -91,6 +91,9 @@ public:
 
 	bool isQuitPrevent();
 
+	void applyUpdatesNoPtsCheck(const MTPUpdates &updates);
+	void applyUpdateNoPtsCheck(const MTPUpdate &update);
+
 	~ApiWrap();
 
 private:

@@ -1045,18 +1045,14 @@ void MainWidget::deleteHistoryAfterLeave(PeerData *peer, const MTPUpdates &updat
 void MainWidget::deleteHistoryPart(DeleteHistoryRequest request, const MTPmessages_AffectedHistory &result) {
 	auto peer = request.peer;
 
-	const auto &d(result.c_messages_affectedHistory());
+	auto &d = result.c_messages_affectedHistory();
 	if (peer && peer->isChannel()) {
-		if (peer->asChannel()->ptsUpdated(d.vpts.v, d.vpts_count.v)) {
-			peer->asChannel()->ptsApplySkippedUpdates();
-		}
+		peer->asChannel()->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v);
 	} else {
-		if (ptsUpdated(d.vpts.v, d.vpts_count.v)) {
-			ptsApplySkippedUpdates();
-		}
+		ptsUpdateAndApply(d.vpts.v, d.vpts_count.v);
 	}
 
-	int32 offset = d.voffset.v;
+	auto offset = d.voffset.v;
 	if (offset <= 0) {
 		cRefReportSpamStatuses().remove(peer->id);
 		Local::writeReportSpamStatuses();
@@ -1148,15 +1144,13 @@ void MainWidget::deleteAllFromUser(ChannelData *channel, UserData *from) {
 }
 
 void MainWidget::deleteAllFromUserPart(DeleteAllFromUserParams params, const MTPmessages_AffectedHistory &result) {
-	const auto &d(result.c_messages_affectedHistory());
-	if (params.channel->ptsUpdated(d.vpts.v, d.vpts_count.v)) {
-		params.channel->ptsApplySkippedUpdates();
-	}
+	auto &d = result.c_messages_affectedHistory();
+	params.channel->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v);
 
-	int32 offset = d.voffset.v;
+	auto offset = d.voffset.v;
 	if (offset > 0) {
 		MTP::send(MTPchannels_DeleteUserHistory(params.channel->inputChannel, params.from->inputUser), rpcDone(&MainWidget::deleteAllFromUserPart, params));
-	} else if (History *h = App::historyLoaded(params.channel)) {
+	} else if (auto h = App::historyLoaded(params.channel)) {
 		if (!h->lastMsg) {
 			checkPeerHistory(params.channel);
 		}
@@ -1714,17 +1708,14 @@ void MainWidget::readRequestDone(PeerData *peer) {
 }
 
 void MainWidget::messagesAffected(PeerData *peer, const MTPmessages_AffectedMessages &result) {
-	const auto &d(result.c_messages_affectedMessages());
+	auto &d = result.c_messages_affectedMessages();
 	if (peer && peer->isChannel()) {
-		if (peer->asChannel()->ptsUpdated(d.vpts.v, d.vpts_count.v)) {
-			peer->asChannel()->ptsApplySkippedUpdates();
-		}
+		peer->asChannel()->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v);
 	} else {
-		if (ptsUpdated(d.vpts.v, d.vpts_count.v)) {
-			ptsApplySkippedUpdates();
-		}
+		ptsUpdateAndApply(d.vpts.v, d.vpts_count.v);
 	}
-	if (History *h = App::historyLoaded(peer ? peer->id : 0)) {
+
+	if (auto h = App::historyLoaded(peer ? peer->id : 0)) {
 		if (!h->lastMsg) {
 			checkPeerHistory(peer);
 		}
@@ -3869,20 +3860,16 @@ void MainWidget::failDifferenceStartTimerFor(ChannelData *channel) {
 	}
 }
 
-bool MainWidget::ptsUpdated(int32 pts, int32 ptsCount) { // return false if need to save that update and apply later
-	return _ptsWaiter.updated(0, pts, ptsCount);
+bool MainWidget::ptsUpdateAndApply(int32 pts, int32 ptsCount, const MTPUpdates &updates) {
+	return _ptsWaiter.updateAndApply(nullptr, pts, ptsCount, updates);
 }
 
-bool MainWidget::ptsUpdated(int32 pts, int32 ptsCount, const MTPUpdates &updates) {
-	return _ptsWaiter.updated(0, pts, ptsCount, updates);
+bool MainWidget::ptsUpdateAndApply(int32 pts, int32 ptsCount, const MTPUpdate &update) {
+	return _ptsWaiter.updateAndApply(nullptr, pts, ptsCount, update);
 }
 
-bool MainWidget::ptsUpdated(int32 pts, int32 ptsCount, const MTPUpdate &update) {
-	return _ptsWaiter.updated(0, pts, ptsCount, update);
-}
-
-void MainWidget::ptsApplySkippedUpdates() {
-	return _ptsWaiter.applySkippedUpdates(0);
+bool MainWidget::ptsUpdateAndApply(int32 pts, int32 ptsCount) {
+	return _ptsWaiter.updateAndApply(nullptr, pts, ptsCount);
 }
 
 void MainWidget::feedDifference(const MTPVector<MTPUser> &users, const MTPVector<MTPChat> &chats, const MTPVector<MTPMessage> &msgs, const MTPVector<MTPUpdate> &other) {
@@ -4760,20 +4747,14 @@ void MainWidget::feedUpdates(const MTPUpdates &updates, uint64 randomId) {
 			MTP_LOG(0, ("getDifference { good - getting user for updateShortMessage }%1").arg(cTestMode() ? " TESTMODE" : ""));
 			return getDifference();
 		}
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, updates)) {
-			return;
+		if (ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, updates)) {
+			// We could've added an item.
+			// Better would be for history to be subscribed to new messages.
+			_history->peerMessagesUpdated();
+
+			// Update date as well.
+			updSetState(0, d.vdate.v, updQts, updSeq);
 		}
-
-		// update before applying skipped
-		auto flags = mtpCastFlags(d.vflags.v) | MTPDmessage::Flag::f_from_id;
-		auto item = App::histories().addNewMessage(MTP_message(MTP_flags(flags), d.vid, d.is_out() ? MTP_int(AuthSession::CurrentUserId()) : d.vuser_id, MTP_peerUser(d.is_out() ? d.vuser_id : MTP_int(AuthSession::CurrentUserId())), d.vfwd_from, d.vvia_bot_id, d.vreply_to_msg_id, d.vdate, d.vmessage, MTP_messageMediaEmpty(), MTPnullMarkup, d.has_entities() ? d.ventities : MTPnullEntities, MTPint(), MTPint()), NewMessageUnread);
-		if (item) {
-			_history->peerMessagesUpdated(item->history()->peer->id);
-		}
-
-		ptsApplySkippedUpdates();
-
-		updSetState(0, d.vdate.v, updQts, updSeq);
 	} break;
 
 	case mtpc_updateShortChatMessage: {
@@ -4788,20 +4769,14 @@ void MainWidget::feedUpdates(const MTPUpdates &updates, uint64 randomId) {
 			if (noFrom && App::api()) App::api()->requestFullPeer(App::chatLoaded(d.vchat_id.v));
 			return getDifference();
 		}
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, updates)) {
-			return;
+		if (ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, updates)) {
+			// We could've added an item.
+			// Better would be for history to be subscribed to new messages.
+			_history->peerMessagesUpdated();
+
+			// Update date as well.
+			updSetState(0, d.vdate.v, updQts, updSeq);
 		}
-
-		// update before applying skipped
-		auto flags = mtpCastFlags(d.vflags.v) | MTPDmessage::Flag::f_from_id;
-		auto item = App::histories().addNewMessage(MTP_message(MTP_flags(flags), d.vid, d.vfrom_id, MTP_peerChat(d.vchat_id), d.vfwd_from, d.vvia_bot_id, d.vreply_to_msg_id, d.vdate, d.vmessage, MTP_messageMediaEmpty(), MTPnullMarkup, d.has_entities() ? d.ventities : MTPnullEntities, MTPint(), MTPint()), NewMessageUnread);
-		if (item) {
-			_history->peerMessagesUpdated(item->history()->peer->id);
-		}
-
-		ptsApplySkippedUpdates();
-
-		updSetState(0, d.vdate.v, updQts, updSeq);
 	} break;
 
 	case mtpc_updateShortSentMessage: {
@@ -4825,13 +4800,10 @@ void MainWidget::feedUpdates(const MTPUpdates &updates, uint64 randomId) {
 			}
 		}
 
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, updates)) {
-			return;
+		if (ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, updates)) {
+			// Update date as well.
+			updSetState(0, d.vdate.v, updQts, updSeq);
 		}
-		// update before applying skipped
-		ptsApplySkippedUpdates();
-
-		updSetState(0, d.vdate.v, updQts, updSeq);
 	} break;
 
 	case mtpc_updatesTooLong: {
@@ -4855,24 +4827,11 @@ void MainWidget::feedUpdate(const MTPUpdate &update) {
 			return getDifference();
 		}
 
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
+		if (ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update)) {
+			// We could've added an item.
+			// Better would be for history to be subscribed to new messages.
+			_history->peerMessagesUpdated();
 		}
-
-		// update before applying skipped
-		bool needToAdd = true;
-		if (d.vmessage.type() == mtpc_message) { // index forwarded messages to links _overview
-			if (App::checkEntitiesAndViewsUpdate(d.vmessage.c_message())) { // already in blocks
-				LOG(("Skipping message, because it is already in blocks!"));
-				needToAdd = false;
-			}
-		}
-		if (needToAdd) {
-			if (auto item = App::histories().addNewMessage(d.vmessage, NewMessageUnread)) {
-				_history->peerMessagesUpdated(item->history()->peer->id);
-			}
-		}
-		ptsApplySkippedUpdates();
 	} break;
 
 	case mtpc_updateMessageID: {
@@ -4906,87 +4865,42 @@ void MainWidget::feedUpdate(const MTPUpdate &update) {
 
 	case mtpc_updateReadMessagesContents: {
 		auto &d = update.c_updateReadMessagesContents();
-
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
-		}
-
-		// update before applying skipped
-		auto &v = d.vmessages.v;
-		for (int32 i = 0, l = v.size(); i < l; ++i) {
-			if (auto item = App::histItemById(NoChannel, v.at(i).v)) {
-				if (item->isMediaUnread()) {
-					item->markMediaRead();
-					Ui::repaintHistoryItem(item);
-
-					if (item->out() && item->history()->peer->isUser()) {
-						auto when = requestingDifference() ? 0 : unixtime();
-						item->history()->peer->asUser()->madeAction(when);
-					}
-				}
-			}
-		}
-
-		ptsApplySkippedUpdates();
+		ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update);
 	} break;
 
 	case mtpc_updateReadHistoryInbox: {
 		auto &d = update.c_updateReadHistoryInbox();
-
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
-		}
-
-		// update before applying skipped
-		App::feedInboxRead(peerFromMTP(d.vpeer), d.vmax_id.v);
-
-		ptsApplySkippedUpdates();
+		ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update);
	} break;
 
 	case mtpc_updateReadHistoryOutbox: {
 		auto &d = update.c_updateReadHistoryOutbox();
-
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
-		}
-
-		// update before applying skipped
-		auto peerId = peerFromMTP(d.vpeer);
-		auto when = requestingDifference() ? 0 : unixtime();
-		App::feedOutboxRead(peerId, d.vmax_id.v, when);
-		if (_history->peer() && _history->peer()->id == peerId) {
+		if (ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update)) {
+			// We could've updated the double checks.
+			// Better would be for history to be subscribed to outbox read events.
 			_history->update();
 		}
-
-		ptsApplySkippedUpdates();
 	} break;
 
 	case mtpc_updateWebPage: {
 		auto &d = update.c_updateWebPage();
 
-		// update web page anyway
+		// Update web page anyway.
 		App::feedWebPage(d.vwebpage);
 		_history->updatePreview();
 		webPagesOrGamesUpdate();
 
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
-		}
-		ptsApplySkippedUpdates();
+		ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update);
 	} break;
 
 	case mtpc_updateDeleteMessages: {
 		auto &d = update.c_updateDeleteMessages();
 
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
+		if (ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update)) {
+			// We could've removed some items.
+			// Better would be for history to be subscribed to removed messages.
+			_history->peerMessagesUpdated();
 		}
-
-		// update before applying skipped
-		App::feedWereDeleted(NoChannel, d.vmessages.v);
-		_history->peerMessagesUpdated();
-
-		ptsApplySkippedUpdates();
 	} break;
 
 	case mtpc_updateUserTyping: {
@@ -5260,26 +5174,17 @@ void MainWidget::feedUpdate(const MTPUpdate &update) {
 		if (channel && !_handlingChannelDifference) {
 			if (channel->ptsRequesting()) { // skip global updates while getting channel difference
 				return;
-			} else if (!channel->ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-				return;
+			} else if (channel->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update)) {
+				// We could've added an item.
+				// Better would be for history to be subscribed to new messages.
+				_history->peerMessagesUpdated();
 			}
-		}
+		} else {
+			App::api()->applyUpdateNoPtsCheck(update);
 
-		// update before applying skipped
-		bool needToAdd = true;
-		if (d.vmessage.type() == mtpc_message) { // index forwarded messages to links _overview
-			if (App::checkEntitiesAndViewsUpdate(d.vmessage.c_message())) { // already in blocks
-				LOG(("Skipping message, because it is already in blocks!"));
-				needToAdd = false;
-			}
-		}
-		if (needToAdd) {
-			if (auto item = App::histories().addNewMessage(d.vmessage, NewMessageUnread)) {
-				_history->peerMessagesUpdated(item->history()->peer->id);
-			}
-		}
-		if (channel && !_handlingChannelDifference) {
-			channel->ptsApplySkippedUpdates();
+			// We could've added an item.
+			// Better would be for history to be subscribed to new messages.
+			_history->peerMessagesUpdated();
 		}
 	} break;
 
@@ -5290,30 +5195,17 @@ void MainWidget::feedUpdate(const MTPUpdate &update) {
 		if (channel && !_handlingChannelDifference) {
 			if (channel->ptsRequesting()) { // skip global updates while getting channel difference
 				return;
-			} else if (!channel->ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-				return;
+			} else {
+				channel->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update);
 			}
-		}
-
-		// update before applying skipped
-		App::updateEditedMessage(d.vmessage);
-
-		if (channel && !_handlingChannelDifference) {
-			channel->ptsApplySkippedUpdates();
+		} else {
+			App::api()->applyUpdateNoPtsCheck(update);
 		}
 	} break;
 
 	case mtpc_updateEditMessage: {
 		auto &d = update.c_updateEditMessage();
-
-		if (!ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-			return;
-		}
-
-		// update before applying skipped
-		App::updateEditedMessage(d.vmessage);
-
-		ptsApplySkippedUpdates();
+		ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update);
 	} break;
 
 	case mtpc_updateChannelPinnedMessage: {
@@ -5356,13 +5248,11 @@ void MainWidget::feedUpdate(const MTPUpdate &update) {
 		if (channel && !_handlingChannelDifference) {
 			if (channel->ptsRequesting()) { // skip global updates while getting channel difference
 				return;
-			} else if (!channel->ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-				return;
+			} else {
+				channel->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update);
 			}
-		}
-
-		if (channel && !_handlingChannelDifference) {
-			channel->ptsApplySkippedUpdates();
+		} else {
+			App::api()->applyUpdateNoPtsCheck(update);
 		}
 	} break;
 
@@ -5373,17 +5263,17 @@ void MainWidget::feedUpdate(const MTPUpdate &update) {
 		if (channel && !_handlingChannelDifference) {
 			if (channel->ptsRequesting()) { // skip global updates while getting channel difference
 				return;
-			} else if (!channel->ptsUpdated(d.vpts.v, d.vpts_count.v, update)) {
-				return;
+			} else if (channel->ptsUpdateAndApply(d.vpts.v, d.vpts_count.v, update)) {
+				// We could've removed some items.
+				// Better would be for history to be subscribed to removed messages.
+				_history->peerMessagesUpdated();
 			}
-		}
+		} else {
+			// We could've removed some items.
+			// Better would be for history to be subscribed to removed messages.
+			_history->peerMessagesUpdated();
 
-		// update before applying skipped
-		App::feedWereDeleted(d.vchannel_id.v, d.vmessages.v);
-		_history->peerMessagesUpdated();
-
-		if (channel && !_handlingChannelDifference) {
-			channel->ptsApplySkippedUpdates();
+			App::api()->applyUpdateNoPtsCheck(update);
 		}
 	} break;
 

@@ -376,6 +376,12 @@ public:
 	void gotRangeDifference(ChannelData *channel, const MTPupdates_ChannelDifference &diff);
 	void onSelfParticipantUpdated(ChannelData *channel);
 
+	// Made public for ApiWrap, while it is still here.
+	// Better would be for this to be moved to ApiWrap.
+	bool requestingDifference() const {
+		return _ptsWaiter.requesting();
+	}
+
 	bool contentOverlapped(const QRect &globalRect);
 
 	void documentLoadProgress(DocumentData *document);
@@ -584,13 +590,9 @@ private:
 	QPoint getFloatPlayerHiddenPosition(QPoint position, QSize size, RectPart side) const;
 	RectPart getFloatPlayerSide(QPoint center) const;
 
-	bool ptsUpdated(int32 pts, int32 ptsCount);
-	bool ptsUpdated(int32 pts, int32 ptsCount, const MTPUpdates &updates);
-	bool ptsUpdated(int32 pts, int32 ptsCount, const MTPUpdate &update);
-	void ptsApplySkippedUpdates();
-	bool requestingDifference() const {
-		return _ptsWaiter.requesting();
-	}
+	bool ptsUpdateAndApply(int32 pts, int32 ptsCount, const MTPUpdates &updates);
+	bool ptsUpdateAndApply(int32 pts, int32 ptsCount, const MTPUpdate &update);
+	bool ptsUpdateAndApply(int32 pts, int32 ptsCount);
 	bool getDifferenceTimeChanged(ChannelData *channel, int32 ms, ChannelGetDifferenceTime &channelCurTime, TimeMs &curTime);
 
 	void viewsIncrementDone(QVector<MTPint> ids, const MTPVector<MTPint> &result, mtpRequestId req);

@@ -983,8 +983,8 @@ void ChannelData::setRestrictedRights(const MTPChannelBannedRights &rights) {
 	Notify::peerUpdatedDelayed(this, UpdateFlag::ChannelRightsChanged | UpdateFlag::AdminsChanged | UpdateFlag::BannedUsersChanged);
 }
 
-uint64 PtsWaiter::ptsKey(PtsSkippedQueue queue) {
-	return _queue.insert(uint64(uint32(_last)) << 32 | uint64(uint32(_count)), queue).key();
+uint64 PtsWaiter::ptsKey(PtsSkippedQueue queue, int32 pts) {
+	return _queue.insert(uint64(uint32(pts)) << 32 | (++_skippedKey), queue).key();
 }
 
 void PtsWaiter::setWaitingForSkipped(ChannelData *channel, int32 ms) {
@@ -1022,13 +1022,13 @@ void PtsWaiter::applySkippedUpdates(ChannelData *channel) {
 
 	setWaitingForSkipped(channel, -1);
 
-	if (!App::main() || _queue.isEmpty()) return;
+	if (!App::api() || _queue.isEmpty()) return;
 
 	++_applySkippedLevel;
 	for (QMap<uint64, PtsSkippedQueue>::const_iterator i = _queue.cbegin(), e = _queue.cend(); i != e; ++i) {
 		switch (i.value()) {
-		case SkippedUpdate: App::main()->feedUpdate(_updateQueue.value(i.key())); break;
-		case SkippedUpdates: App::main()->feedUpdates(_updatesQueue.value(i.key())); break;
+		case SkippedUpdate: App::api()->applyUpdateNoPtsCheck(_updateQueue.value(i.key())); break;
+		case SkippedUpdates: App::api()->applyUpdatesNoPtsCheck(_updatesQueue.value(i.key())); break;
 		}
 	}
 	--_applySkippedLevel;
@@ -1042,15 +1042,6 @@ void PtsWaiter::clearSkippedUpdates() {
 	_applySkippedLevel = 0;
 }
 
-bool PtsWaiter::updated(ChannelData *channel, int32 pts, int32 count) {
-	if (_requesting || _applySkippedLevel) {
-		return true;
-	} else if (pts <= _good && count > 0) {
-		return false;
-	}
-	return check(channel, pts, count);
-}
-
 bool PtsWaiter::updated(ChannelData *channel, int32 pts, int32 count, const MTPUpdates &updates) {
 	if (_requesting || _applySkippedLevel) {
 		return true;
@@ -1059,7 +1050,7 @@ bool PtsWaiter::updated(ChannelData *channel, int32 pts, int32 count, const MTPU
 	} else if (check(channel, pts, count)) {
 		return true;
 	}
-	_updatesQueue.insert(ptsKey(SkippedUpdates), updates);
+	_updatesQueue.insert(ptsKey(SkippedUpdates, pts), updates);
 	return false;
 }
 
@@ -1071,10 +1062,55 @@ bool PtsWaiter::updated(ChannelData *channel, int32 pts, int32 count, const MTPU
 	} else if (check(channel, pts, count)) {
 		return true;
 	}
-	_updateQueue.insert(ptsKey(SkippedUpdate), update);
+	_updateQueue.insert(ptsKey(SkippedUpdate, pts), update);
 	return false;
 }
 
+bool PtsWaiter::updated(ChannelData *channel, int32 pts, int32 count) {
+	if (_requesting || _applySkippedLevel) {
+		return true;
+	} else if (pts <= _good && count > 0) {
+		return false;
+	}
+	return check(channel, pts, count);
+}
+
+bool PtsWaiter::updateAndApply(ChannelData *channel, int32 pts, int32 count, const MTPUpdates &updates) {
+	if (!updated(channel, pts, count, updates)) {
+		return false;
+	}
+	if (!_waitingForSkipped || _queue.isEmpty()) {
+		// Optimization - no need to put in queue and back.
+		App::api()->applyUpdatesNoPtsCheck(updates);
+	} else {
+		_updatesQueue.insert(ptsKey(SkippedUpdates, pts), updates);
+		applySkippedUpdates(channel);
+	}
+	return true;
+}
+
+bool PtsWaiter::updateAndApply(ChannelData *channel, int32 pts, int32 count, const MTPUpdate &update) {
+	if (!updated(channel, pts, count, update)) {
+		return false;
+	}
+	if (!_waitingForSkipped || _queue.isEmpty()) {
+		// Optimization - no need to put in queue and back.
+		App::api()->applyUpdateNoPtsCheck(update);
+	} else {
+		_updateQueue.insert(ptsKey(SkippedUpdate, pts), update);
+		applySkippedUpdates(channel);
+	}
+	return true;
+}
+
+bool PtsWaiter::updateAndApply(ChannelData *channel, int32 pts, int32 count) {
+	if (!updated(channel, pts, count)) {
+		return false;
+	}
+	applySkippedUpdates(channel);
+	return true;
+}
+
 bool PtsWaiter::check(ChannelData *channel, int32 pts, int32 count) { // return false if need to save that update and apply later
 	if (!inited()) {
 		init(pts);

@@ -696,15 +696,18 @@ public:
 	int32 current() const{
 		return _good;
 	}
-	bool updated(ChannelData *channel, int32 pts, int32 count);
 	bool updated(ChannelData *channel, int32 pts, int32 count, const MTPUpdates &updates);
 	bool updated(ChannelData *channel, int32 pts, int32 count, const MTPUpdate &update);
+	bool updated(ChannelData *channel, int32 pts, int32 count);
+	bool updateAndApply(ChannelData *channel, int32 pts, int32 count, const MTPUpdates &updates);
+	bool updateAndApply(ChannelData *channel, int32 pts, int32 count, const MTPUpdate &update);
+	bool updateAndApply(ChannelData *channel, int32 pts, int32 count);
 	void applySkippedUpdates(ChannelData *channel);
 	void clearSkippedUpdates();
 
 private:
 	bool check(ChannelData *channel, int32 pts, int32 count); // return false if need to save that update and apply later
-	uint64 ptsKey(PtsSkippedQueue queue);
+	uint64 ptsKey(PtsSkippedQueue queue, int32 pts);
 	void checkForWaiting(ChannelData *channel);
 	QMap<uint64, PtsSkippedQueue> _queue;
 	QMap<uint64, MTPUpdate> _updateQueue;
@@ -712,6 +715,7 @@ private:
 	int32 _good, _last, _count;
 	int32 _applySkippedLevel;
 	bool _requesting, _waitingForSkipped, _waitingForShortPoll;
+	uint32 _skippedKey = 0;
 };
 
 struct MegagroupInfo {
@@ -938,15 +942,16 @@ public:
 		_ptsWaiter.init(pts);
 	}
 	void ptsReceived(int32 pts) {
-		if (_ptsWaiter.updated(this, pts, 0)) {
-			_ptsWaiter.applySkippedUpdates(this);
-		}
+		_ptsWaiter.updateAndApply(this, pts, 0);
 	}
-	bool ptsUpdated(int32 pts, int32 count) {
-		return _ptsWaiter.updated(this, pts, count);
+	bool ptsUpdateAndApply(int32 pts, int32 count) {
+		return _ptsWaiter.updateAndApply(this, pts, count);
 	}
-	bool ptsUpdated(int32 pts, int32 count, const MTPUpdate &update) {
-		return _ptsWaiter.updated(this, pts, count, update);
+	bool ptsUpdateAndApply(int32 pts, int32 count, const MTPUpdate &update) {
+		return _ptsWaiter.updateAndApply(this, pts, count, update);
 	}
+	bool ptsUpdateAndApply(int32 pts, int32 count, const MTPUpdates &updates) {
+		return _ptsWaiter.updateAndApply(this, pts, count, updates);
+	}
 	int32 pts() const {
 		return _ptsWaiter.current();
@@ -960,9 +965,6 @@ public:
 	void ptsSetRequesting(bool isRequesting) {
 		return _ptsWaiter.setRequesting(isRequesting);
 	}
-	void ptsApplySkippedUpdates() {
-		return _ptsWaiter.applySkippedUpdates(this);
-	}
 	void ptsWaitingForShortPoll(int32 ms) { // < 0 - not waiting
 		return _ptsWaiter.setWaitingForShortPoll(this, ms);
 	}